/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
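
// Frame sync code: three fixed bytes that start every key frame and
// intra-only frame; anything else means the data is not a VP9 frame header.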
static int FUNC(frame_sync_code)(CodedBitstreamContext *ctx, RWContext *rw,
                                 VP9RawFrameHeader *current)
{
    uint8_t frame_sync_byte_0 = VP9_FRAME_SYNC_0;
    uint8_t frame_sync_byte_1 = VP9_FRAME_SYNC_1;
    uint8_t frame_sync_byte_2 = VP9_FRAME_SYNC_2;
    int err;

    xf(8, frame_sync_byte_0, frame_sync_byte_0, 0);
    xf(8, frame_sync_byte_1, frame_sync_byte_1, 0);
    xf(8, frame_sync_byte_2, frame_sync_byte_2, 0);

    if (frame_sync_byte_0 != VP9_FRAME_SYNC_0 ||
        frame_sync_byte_1 != VP9_FRAME_SYNC_1 ||
        frame_sync_byte_2 != VP9_FRAME_SYNC_2) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid frame sync code: "
               "%02x %02x %02x.\n", frame_sync_byte_0,
               frame_sync_byte_1, frame_sync_byte_2);
        return AVERROR_INVALIDDATA;
    }

    return 0;
}
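
// Bit depth, colour space, colour range and chroma subsampling; the derived
// values are also stored in the CodedBitstreamVP9Context for later use.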
static int FUNC(color_config)(CodedBitstreamContext *ctx, RWContext *rw,
                              VP9RawFrameHeader *current, int profile)
{
    CodedBitstreamVP9Context *vp9 = ctx->priv_data;
    int err;

    if (profile >= 2) {
        f(1, ten_or_twelve_bit);
        vp9->bit_depth = current->ten_or_twelve_bit ? 12 : 10;
    } else
        vp9->bit_depth = 8;

    f(3, color_space);

    if (current->color_space != VP9_CS_RGB) {
        f(1, color_range);
        if (profile == 1 || profile == 3) {
            f(1, subsampling_x);
            f(1, subsampling_y);
            fixed(1, reserved_zero, 0);
        } else {
            infer(subsampling_x, 1);
            infer(subsampling_y, 1);
        }
    } else {
        infer(color_range, 1);
        if (profile == 1 || profile == 3) {
            infer(subsampling_x, 0);
            infer(subsampling_y, 0);
            fixed(1, reserved_zero, 0);
        }
    }

    vp9->subsampling_x = current->subsampling_x;
    vp9->subsampling_y = current->subsampling_y;

    return 0;
}
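
// Explicitly coded frame size, from which the mode-info (8x8) and 64x64
// superblock grid dimensions are derived.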
static int FUNC(frame_size)(CodedBitstreamContext *ctx, RWContext *rw,
                            VP9RawFrameHeader *current)
{
    CodedBitstreamVP9Context *vp9 = ctx->priv_data;
    int err;

    f(16, frame_width_minus_1);
    f(16, frame_height_minus_1);

    vp9->frame_width  = current->frame_width_minus_1  + 1;
    vp9->frame_height = current->frame_height_minus_1 + 1;

    vp9->mi_cols = (vp9->frame_width  + 7) >> 3;
    vp9->mi_rows = (vp9->frame_height + 7) >> 3;
    vp9->sb64_cols = (vp9->mi_cols + 7) >> 3;
    vp9->sb64_rows = (vp9->mi_rows + 7) >> 3;

    return 0;
}
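
// Optional render (display) size, coded only when it differs from the
// coded frame size.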
static int FUNC(render_size)(CodedBitstreamContext *ctx, RWContext *rw,
                             VP9RawFrameHeader *current)
{
    int err;

    f(1, render_and_frame_size_different);

    if (current->render_and_frame_size_different) {
        f(16, render_width_minus_1);
        f(16, render_height_minus_1);
    }

    return 0;
}
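
// Inter frames may inherit their size and format from one of their
// reference frames instead of coding the size explicitly.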
static int FUNC(frame_size_with_refs)(CodedBitstreamContext *ctx, RWContext *rw,
                                      VP9RawFrameHeader *current)
{
    CodedBitstreamVP9Context *vp9 = ctx->priv_data;
    int err, i;

    for (i = 0; i < VP9_REFS_PER_FRAME; i++) {
        fs(1, found_ref[i], 1, i);
        if (current->found_ref[i]) {
            VP9ReferenceFrameState *ref =
                &vp9->ref[current->ref_frame_idx[i]];

            vp9->frame_width   = ref->frame_width;
            vp9->frame_height  = ref->frame_height;
            vp9->subsampling_x = ref->subsampling_x;
            vp9->subsampling_y = ref->subsampling_y;
            vp9->bit_depth     = ref->bit_depth;

            break;
        }
    }
    if (i >= VP9_REFS_PER_FRAME)
        CHECK(FUNC(frame_size)(ctx, rw, current));
    else {
        vp9->mi_cols = (vp9->frame_width  + 7) >> 3;
        vp9->mi_rows = (vp9->frame_height + 7) >> 3;
        vp9->sb64_cols = (vp9->mi_cols + 7) >> 3;
        vp9->sb64_rows = (vp9->mi_rows + 7) >> 3;
    }

    CHECK(FUNC(render_size)(ctx, rw, current));

    return 0;
}
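
// Interpolation filter: either switchable per block or a single filter
// type signalled in the frame header.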
static int FUNC(interpolation_filter)(CodedBitstreamContext *ctx, RWContext *rw,
                                      VP9RawFrameHeader *current)
{
    int err;

    f(1, is_filter_switchable);
    if (!current->is_filter_switchable)
        f(2, raw_interpolation_filter_type);

    return 0;
}
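
// Loop filter level and sharpness, plus optional per-reference-frame and
// per-mode delta updates.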
static int FUNC(loop_filter_params)(CodedBitstreamContext *ctx, RWContext *rw,
                                    VP9RawFrameHeader *current)
{
    int err, i;

    f(6, loop_filter_level);
    f(3, loop_filter_sharpness);

    f(1, loop_filter_delta_enabled);
    if (current->loop_filter_delta_enabled) {
        f(1, loop_filter_delta_update);
        if (current->loop_filter_delta_update) {
            for (i = 0; i < VP9_MAX_REF_FRAMES; i++) {
                fs(1, update_ref_delta[i], 1, i);
                if (current->update_ref_delta[i])
                    ss(6, loop_filter_ref_deltas[i], 1, i);
            }
            for (i = 0; i < 2; i++) {
                fs(1, update_mode_delta[i], 1, i);
                if (current->update_mode_delta[i])
                    ss(6, loop_filter_mode_deltas[i], 1, i);
            }
        }
    }

    return 0;
}
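
// Base quantiser index and the delta-Q values for luma DC and chroma DC/AC.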
static int FUNC(quantization_params)(CodedBitstreamContext *ctx, RWContext *rw,
                                     VP9RawFrameHeader *current)
{
    int err;

    f(8, base_q_idx);

    delta_q(delta_q_y_dc);
    delta_q(delta_q_uv_dc);
    delta_q(delta_q_uv_ac);

    return 0;
}
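
// Segmentation map update probabilities and per-segment feature data.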
static int FUNC(segmentation_params)(CodedBitstreamContext *ctx, RWContext *rw,
                                     VP9RawFrameHeader *current)
{
    static const uint8_t segmentation_feature_bits[VP9_SEG_LVL_MAX]   = { 8, 6, 2, 0 };
    static const uint8_t segmentation_feature_signed[VP9_SEG_LVL_MAX] = { 1, 1, 0, 0 };
    int err, i, j;

    f(1, segmentation_enabled);

    if (current->segmentation_enabled) {
        f(1, segmentation_update_map);
        if (current->segmentation_update_map) {
            for (i = 0; i < 7; i++)
                prob(segmentation_tree_probs[i], 1, i);
            f(1, segmentation_temporal_update);
            for (i = 0; i < 3; i++) {
                if (current->segmentation_temporal_update)
                    prob(segmentation_pred_prob[i], 1, i);
                else
                    infer(segmentation_pred_prob[i], 255);
            }
        }

        f(1, segmentation_update_data);
        if (current->segmentation_update_data) {
            f(1, segmentation_abs_or_delta_update);
            for (i = 0; i < VP9_MAX_SEGMENTS; i++) {
                for (j = 0; j < VP9_SEG_LVL_MAX; j++) {
                    fs(1, feature_enabled[i][j], 2, i, j);
                    if (current->feature_enabled[i][j] &&
                        segmentation_feature_bits[j]) {
                        fs(segmentation_feature_bits[j],
                           feature_value[i][j], 2, i, j);
                        if (segmentation_feature_signed[j])
                            fs(1, feature_sign[i][j], 2, i, j);
                        else
                            infer(feature_sign[i][j], 0);
                    } else {
                        infer(feature_value[i][j], 0);
                        infer(feature_sign[i][j], 0);
                    }
                }
            }
        }
    }

    return 0;
}
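
// Tile column/row split; the number of tile columns is constrained by the
// minimum and maximum tile width in 64x64-superblock units.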
static int FUNC(tile_info)(CodedBitstreamContext *ctx, RWContext *rw,
                           VP9RawFrameHeader *current)
{
    CodedBitstreamVP9Context *vp9 = ctx->priv_data;
    int min_log2_tile_cols, max_log2_tile_cols;
    int err;

    min_log2_tile_cols = 0;
    while ((VP9_MAX_TILE_WIDTH_B64 << min_log2_tile_cols) < vp9->sb64_cols)
        ++min_log2_tile_cols;
    max_log2_tile_cols = 0;
    while ((vp9->sb64_cols >> (max_log2_tile_cols + 1)) >= VP9_MIN_TILE_WIDTH_B64)
        ++max_log2_tile_cols;

    increment(tile_cols_log2, min_log2_tile_cols, max_log2_tile_cols);

    increment(tile_rows_log2, 0, 2);

    return 0;
}
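
// The uncompressed frame header: profile, frame type, reference handling,
// frame size and all per-frame parameter sets, ending with the size of the
// compressed header that follows it.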
static int FUNC(uncompressed_header)(CodedBitstreamContext *ctx, RWContext *rw,
                                     VP9RawFrameHeader *current)
{
    CodedBitstreamVP9Context *vp9 = ctx->priv_data;
    int err, i;

    f(2, frame_marker);

    f(1, profile_low_bit);
    f(1, profile_high_bit);
    vp9->profile = (current->profile_high_bit << 1) + current->profile_low_bit;
    if (vp9->profile == 3)
        fixed(1, reserved_zero, 0);

    f(1, show_existing_frame);
    if (current->show_existing_frame) {
        f(3, frame_to_show_map_idx);
        infer(header_size_in_bytes, 0);
        infer(refresh_frame_flags, 0x00);
        infer(loop_filter_level, 0);
        return 0;
    }

    f(1, frame_type);
    f(1, show_frame);
    f(1, error_resilient_mode);

    if (current->frame_type == VP9_KEY_FRAME) {
        CHECK(FUNC(frame_sync_code)(ctx, rw, current));
        CHECK(FUNC(color_config)(ctx, rw, current, vp9->profile));
        CHECK(FUNC(frame_size)(ctx, rw, current));
        CHECK(FUNC(render_size)(ctx, rw, current));

        infer(refresh_frame_flags, 0xff);
    } else {
        if (current->show_frame == 0)
            f(1, intra_only);
        else
            infer(intra_only, 0);

        if (current->error_resilient_mode == 0)
            f(2, reset_frame_context);
        else
            infer(reset_frame_context, 0);

        if (current->intra_only == 1) {
            CHECK(FUNC(frame_sync_code)(ctx, rw, current));

            if (vp9->profile > 0) {
                CHECK(FUNC(color_config)(ctx, rw, current, vp9->profile));
            } else {
                infer(color_space, 1);
                infer(subsampling_x, 1);
                infer(subsampling_y, 1);
                vp9->bit_depth = 8;

                vp9->subsampling_x = current->subsampling_x;
                vp9->subsampling_y = current->subsampling_y;
            }

            f(8, refresh_frame_flags);

            CHECK(FUNC(frame_size)(ctx, rw, current));
            CHECK(FUNC(render_size)(ctx, rw, current));
        } else {
            f(8, refresh_frame_flags);

            for (i = 0; i < VP9_REFS_PER_FRAME; i++) {
                fs(3, ref_frame_idx[i], 1, i);
                fs(1, ref_frame_sign_bias[VP9_LAST_FRAME + i],
                   1, VP9_LAST_FRAME + i);
            }

            CHECK(FUNC(frame_size_with_refs)(ctx, rw, current));

            f(1, allow_high_precision_mv);

            CHECK(FUNC(interpolation_filter)(ctx, rw, current));
        }
    }

    if (current->error_resilient_mode == 0) {
        f(1, refresh_frame_context);
        f(1, frame_parallel_decoding_mode);
    } else {
        infer(refresh_frame_context, 0);
        infer(frame_parallel_decoding_mode, 1);
    }

    f(2, frame_context_idx);

    CHECK(FUNC(loop_filter_params)(ctx, rw, current));
    CHECK(FUNC(quantization_params)(ctx, rw, current));
    CHECK(FUNC(segmentation_params)(ctx, rw, current));
    CHECK(FUNC(tile_info)(ctx, rw, current));

    f(16, header_size_in_bytes);

    for (i = 0; i < VP9_NUM_REF_FRAMES; i++) {
        if (current->refresh_frame_flags & (1 << i)) {
            vp9->ref[i] = (VP9ReferenceFrameState) {
                .frame_width   = vp9->frame_width,
                .frame_height  = vp9->frame_height,
                .subsampling_x = vp9->subsampling_x,
                .subsampling_y = vp9->subsampling_y,
                .bit_depth     = vp9->bit_depth,
            };
        }
    }

    av_log(ctx->log_ctx, AV_LOG_DEBUG, "Frame: size %dx%d "
           "subsample %dx%d bit_depth %d tiles %dx%d.\n",
           vp9->frame_width, vp9->frame_height,
           vp9->subsampling_x, vp9->subsampling_y,
           vp9->bit_depth, 1 << current->tile_cols_log2,
           1 << current->tile_rows_log2);

    return 0;
}
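
// Zero bits up to the next byte boundary.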
static int FUNC(trailing_bits)(CodedBitstreamContext *ctx, RWContext *rw)
{
    int err;
    av_unused int zero = 0;
    while (byte_alignment(rw) != 0)
        xf(1, zero_bit, zero, 0);

    return 0;
}
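
// A whole frame at the syntax level: the uncompressed header followed by
// byte alignment; the remaining frame data is not parsed here.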
static int FUNC(frame)(CodedBitstreamContext *ctx, RWContext *rw,
                       VP9RawFrame *current)
{
    int err;

    HEADER("Frame");

    CHECK(FUNC(uncompressed_header)(ctx, rw, &current->header));

    CHECK(FUNC(trailing_bits)(ctx, rw));

    return 0;
}
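
// Superframe index: the marker and size fields are coded twice, once at
// each end of the index.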
static int FUNC(superframe_index)(CodedBitstreamContext *ctx, RWContext *rw,
                                  VP9RawSuperframeIndex *current)
{
    int err, i;

    HEADER("Superframe Index");

    f(3, superframe_marker);
    f(2, bytes_per_framesize_minus_1);
    f(3, frames_in_superframe_minus_1);

    for (i = 0; i <= current->frames_in_superframe_minus_1; i++) {
        // Surprise little-endian!
        fle(8 * (current->bytes_per_framesize_minus_1 + 1),
            frame_sizes[i], 1, i);
    }

    f(3, superframe_marker);
    f(2, bytes_per_framesize_minus_1);
    f(3, frames_in_superframe_minus_1);

    return 0;
}