// Copyright (c) 2017-2022, The rav1e contributors. All rights reserved
//
// This source code is subject to the terms of the BSD 2 Clause License and
// the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
// was not distributed with this source code in the LICENSE file, you can
// obtain it at www.aomedia.org/license/software. If the Alliance for Open
// Media Patent License 1.0 was not distributed with this source code in the
// PATENTS file, you can obtain it at www.aomedia.org/license/patent.

#![allow(non_camel_case_types)]
#![allow(dead_code)]

use self::BlockSize::*;
use self::TxSize::*;
use crate::context::*;
use crate::frame::*;
use crate::predict::*;
use crate::recon_intra::*;
use crate::serialize::{Deserialize, Serialize};
use crate::tiling::*;
use crate::transform::TxSize;
use crate::util::*;
use thiserror::Error;

use std::mem::transmute;
use std::mem::MaybeUninit;

// LAST_FRAME through ALTREF_FRAME correspond to slots 0-6.
#[derive(PartialEq, Eq, PartialOrd, Copy, Clone, Debug)]
pub enum RefType {
  INTRA_FRAME = 0,
  LAST_FRAME = 1,
  LAST2_FRAME = 2,
  LAST3_FRAME = 3,
  GOLDEN_FRAME = 4,
  BWDREF_FRAME = 5,
  ALTREF2_FRAME = 6,
  ALTREF_FRAME = 7,
  NONE_FRAME = 8,
}

impl RefType {
  /// convert to a ref list index, 0-6 (`INTER_REFS_PER_FRAME`)
  ///
  /// # Panics
  ///
  /// - If the ref type is a None or Intra frame
  #[inline]
  pub fn to_index(self) -> usize {
    match self {
      NONE_FRAME => {
        panic!("Tried to get slot of NONE_FRAME");
      }
      INTRA_FRAME => {
        panic!("Tried to get slot of INTRA_FRAME");
      }
      _ => (self as usize) - 1,
    }
  }
  #[inline]
  pub const fn is_fwd_ref(self) -> bool {
    (self as usize) < 5
  }
  #[inline]
  pub const fn is_bwd_ref(self) -> bool {
    (self as usize) >= 5
  }
}
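
// A minimal illustrative check of the mapping above (not part of the
// original source): `to_index` shifts the enum discriminant down by one, so
// the seven inter references land in list slots 0..INTER_REFS_PER_FRAME.
#[cfg(test)]
mod ref_type_index_examples {
  use super::RefType::*;

  #[test]
  fn inter_refs_map_to_slots_0_through_6() {
    assert_eq!(LAST_FRAME.to_index(), 0);
    assert_eq!(GOLDEN_FRAME.to_index(), 3);
    assert_eq!(ALTREF_FRAME.to_index(), 6);
    assert!(LAST_FRAME.is_fwd_ref());
    assert!(BWDREF_FRAME.is_bwd_ref());
  }
}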

use self::RefType::*;
use std::fmt;
use std::fmt::Display;

pub const ALL_INTER_REFS: [RefType; 7] = [
  LAST_FRAME,
  LAST2_FRAME,
  LAST3_FRAME,
  GOLDEN_FRAME,
  BWDREF_FRAME,
  ALTREF2_FRAME,
  ALTREF_FRAME,
];

pub const LAST_LAST2_FRAMES: usize = 0; // { LAST_FRAME, LAST2_FRAME }
pub const LAST_LAST3_FRAMES: usize = 1; // { LAST_FRAME, LAST3_FRAME }
pub const LAST_GOLDEN_FRAMES: usize = 2; // { LAST_FRAME, GOLDEN_FRAME }
pub const BWDREF_ALTREF_FRAMES: usize = 3; // { BWDREF_FRAME, ALTREF_FRAME }
pub const LAST2_LAST3_FRAMES: usize = 4; // { LAST2_FRAME, LAST3_FRAME }
pub const LAST2_GOLDEN_FRAMES: usize = 5; // { LAST2_FRAME, GOLDEN_FRAME }
pub const LAST3_GOLDEN_FRAMES: usize = 6; // { LAST3_FRAME, GOLDEN_FRAME }
pub const BWDREF_ALTREF2_FRAMES: usize = 7; // { BWDREF_FRAME, ALTREF2_FRAME }
pub const ALTREF2_ALTREF_FRAMES: usize = 8; // { ALTREF2_FRAME, ALTREF_FRAME }
pub const TOTAL_UNIDIR_COMP_REFS: usize = 9;

// NOTE: UNIDIR_COMP_REFS is the number of uni-directional reference pairs
// that are explicitly signaled.
pub const UNIDIR_COMP_REFS: usize = BWDREF_ALTREF_FRAMES + 1;

pub const FWD_REFS: usize = 4;
pub const BWD_REFS: usize = 3;
pub const SINGLE_REFS: usize = 7;
pub const TOTAL_REFS_PER_FRAME: usize = 8;
pub const INTER_REFS_PER_FRAME: usize = 7;
pub const TOTAL_COMP_REFS: usize =
  FWD_REFS * BWD_REFS + TOTAL_UNIDIR_COMP_REFS;

pub const REF_FRAMES_LOG2: usize = 3;
pub const REF_FRAMES: usize = 1 << REF_FRAMES_LOG2;

pub const REF_CONTEXTS: usize = 3;
pub const MVREF_ROW_COLS: usize = 3;

#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Debug)]
pub enum PartitionType {
  PARTITION_NONE,
  PARTITION_HORZ,
  PARTITION_VERT,
  PARTITION_SPLIT,
  PARTITION_HORZ_A, // HORZ split and the top partition is split again
  PARTITION_HORZ_B, // HORZ split and the bottom partition is split again
  PARTITION_VERT_A, // VERT split and the left partition is split again
  PARTITION_VERT_B, // VERT split and the right partition is split again
  PARTITION_HORZ_4, // 4:1 horizontal partition
  PARTITION_VERT_4, // 4:1 vertical partition
  PARTITION_INVALID,
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum BlockSize {
  BLOCK_4X4,
  BLOCK_4X8,
  BLOCK_8X4,
  BLOCK_8X8,
  BLOCK_8X16,
  BLOCK_16X8,
  BLOCK_16X16,
  BLOCK_16X32,
  BLOCK_32X16,
  BLOCK_32X32,
  BLOCK_32X64,
  BLOCK_64X32,
  BLOCK_64X64,
  BLOCK_64X128,
  BLOCK_128X64,
  BLOCK_128X128,
  BLOCK_4X16,
  BLOCK_16X4,
  BLOCK_8X32,
  BLOCK_32X8,
  BLOCK_16X64,
  BLOCK_64X16,
}

#[derive(Debug, Error, Copy, Clone, Eq, PartialEq)]
pub struct InvalidBlockSize;

impl Display for InvalidBlockSize {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    f.write_str("invalid block size")
  }
}

impl PartialOrd for BlockSize {
  #[inline(always)]
  fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
    use std::cmp::Ordering::{Equal, Greater, Less};
    match (
      self.width().cmp(&other.width()),
      self.height().cmp(&other.height()),
    ) {
      (Greater, Less) | (Less, Greater) => None,
      (Equal, Equal) => Some(Equal),
      (Greater, _) | (_, Greater) => Some(Greater),
      (Less, _) | (_, Less) => Some(Less),
    }
  }
}
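
// Illustrative examples of the partial order above (not from the original
// source): a size compares as smaller only when neither dimension is larger,
// so mixed cases such as 8x16 vs. 16x8 are unordered.
#[cfg(test)]
mod block_size_ordering_examples {
  use super::BlockSize::*;
  use std::cmp::Ordering;

  #[test]
  fn ordering_follows_both_dimensions() {
    assert_eq!(BLOCK_8X8.partial_cmp(&BLOCK_16X16), Some(Ordering::Less));
    assert_eq!(BLOCK_32X32.partial_cmp(&BLOCK_32X16), Some(Ordering::Greater));
    assert_eq!(BLOCK_8X16.partial_cmp(&BLOCK_16X8), None);
  }
}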

#[cfg(test)]
impl Default for BlockSize {
  fn default() -> Self {
    BlockSize::BLOCK_64X64
  }
}

impl BlockSize {
  pub const BLOCK_SIZES_ALL: usize = 22;
  pub const BLOCK_SIZES: usize = BlockSize::BLOCK_SIZES_ALL - 6; // BLOCK_SIZES_ALL minus 4:1 non-squares, six of them

  #[inline]
  /// # Errors
  ///
  /// - Returns `InvalidBlockSize` if the given `w` and `h` do not produce
  /// a valid block size.
  pub fn from_width_and_height_opt(
    w: usize, h: usize,
  ) -> Result<BlockSize, InvalidBlockSize> {
    match (w, h) {
      (4, 4) => Ok(BLOCK_4X4),
      (4, 8) => Ok(BLOCK_4X8),
      (4, 16) => Ok(BLOCK_4X16),
      (8, 4) => Ok(BLOCK_8X4),
      (8, 8) => Ok(BLOCK_8X8),
      (8, 16) => Ok(BLOCK_8X16),
      (8, 32) => Ok(BLOCK_8X32),
      (16, 4) => Ok(BLOCK_16X4),
      (16, 8) => Ok(BLOCK_16X8),
      (16, 16) => Ok(BLOCK_16X16),
      (16, 32) => Ok(BLOCK_16X32),
      (16, 64) => Ok(BLOCK_16X64),
      (32, 8) => Ok(BLOCK_32X8),
      (32, 16) => Ok(BLOCK_32X16),
      (32, 32) => Ok(BLOCK_32X32),
      (32, 64) => Ok(BLOCK_32X64),
      (64, 16) => Ok(BLOCK_64X16),
      (64, 32) => Ok(BLOCK_64X32),
      (64, 64) => Ok(BLOCK_64X64),
      (64, 128) => Ok(BLOCK_64X128),
      (128, 64) => Ok(BLOCK_128X64),
      (128, 128) => Ok(BLOCK_128X128),
      _ => Err(InvalidBlockSize),
    }
  }

  /// # Panics
  ///
  /// - If the given `w` and `h` do not produce a valid block size.
  pub fn from_width_and_height(w: usize, h: usize) -> BlockSize {
    Self::from_width_and_height_opt(w, h).unwrap()
  }

  #[inline]
  pub fn cfl_allowed(self) -> bool {
    // TODO: fix me when enabling EXT_PARTITION_TYPES
    self <= BlockSize::BLOCK_32X32
  }

  #[inline]
  pub const fn width(self) -> usize {
    1 << self.width_log2()
  }

  /// width * height
  #[inline]
  pub const fn area(self) -> usize {
    self.width() * self.height()
  }

  #[inline]
  pub const fn width_log2(self) -> usize {
    match self {
      BLOCK_4X4 | BLOCK_4X8 | BLOCK_4X16 => 2,
      BLOCK_8X4 | BLOCK_8X8 | BLOCK_8X16 | BLOCK_8X32 => 3,
      BLOCK_16X4 | BLOCK_16X8 | BLOCK_16X16 | BLOCK_16X32 | BLOCK_16X64 => 4,
      BLOCK_32X8 | BLOCK_32X16 | BLOCK_32X32 | BLOCK_32X64 => 5,
      BLOCK_64X16 | BLOCK_64X32 | BLOCK_64X64 | BLOCK_64X128 => 6,
      BLOCK_128X64 | BLOCK_128X128 => 7,
    }
  }

  #[inline]
  pub const fn width_mi_log2(self) -> usize {
    self.width_log2() - 2
  }

  #[inline]
  pub const fn width_mi(self) -> usize {
    self.width() >> MI_SIZE_LOG2
  }

  #[inline]
  pub fn width_imp_b(self) -> usize {
    (self.width() >> (IMPORTANCE_BLOCK_TO_BLOCK_SHIFT + BLOCK_TO_PLANE_SHIFT))
      .max(1)
  }

  #[inline]
  pub const fn height(self) -> usize {
    1 << self.height_log2()
  }

  #[inline]
  pub const fn height_log2(self) -> usize {
    match self {
      BLOCK_4X4 | BLOCK_8X4 | BLOCK_16X4 => 2,
      BLOCK_4X8 | BLOCK_8X8 | BLOCK_16X8 | BLOCK_32X8 => 3,
      BLOCK_4X16 | BLOCK_8X16 | BLOCK_16X16 | BLOCK_32X16 | BLOCK_64X16 => 4,
      BLOCK_8X32 | BLOCK_16X32 | BLOCK_32X32 | BLOCK_64X32 => 5,
      BLOCK_16X64 | BLOCK_32X64 | BLOCK_64X64 | BLOCK_128X64 => 6,
      BLOCK_64X128 | BLOCK_128X128 => 7,
    }
  }

  #[inline]
  pub const fn height_mi_log2(self) -> usize {
    self.height_log2() - 2
  }

  #[inline]
  pub const fn height_mi(self) -> usize {
    self.height() >> MI_SIZE_LOG2
  }

  #[inline]
  pub fn height_imp_b(self) -> usize {
    (self.height() >> (IMPORTANCE_BLOCK_TO_BLOCK_SHIFT + BLOCK_TO_PLANE_SHIFT))
      .max(1)
  }

  #[inline]
  pub const fn tx_size(self) -> TxSize {
    match self {
      BLOCK_4X4 => TX_4X4,
      BLOCK_4X8 => TX_4X8,
      BLOCK_8X4 => TX_8X4,
      BLOCK_8X8 => TX_8X8,
      BLOCK_8X16 => TX_8X16,
      BLOCK_16X8 => TX_16X8,
      BLOCK_16X16 => TX_16X16,
      BLOCK_16X32 => TX_16X32,
      BLOCK_32X16 => TX_32X16,
      BLOCK_32X32 => TX_32X32,
      BLOCK_32X64 => TX_32X64,
      BLOCK_64X32 => TX_64X32,
      BLOCK_4X16 => TX_4X16,
      BLOCK_16X4 => TX_16X4,
      BLOCK_8X32 => TX_8X32,
      BLOCK_32X8 => TX_32X8,
      BLOCK_16X64 => TX_16X64,
      BLOCK_64X16 => TX_64X16,
      _ => TX_64X64,
    }
  }

  /// Source: `Subsampled_Size` (AV1 specification section 5.11.38)
  ///
  /// # Errors
  ///
  /// - Returns `InvalidBlockSize` if the given block size cannot
  /// be subsampled in the requested way.
  #[inline]
  pub const fn subsampled_size(
    self, xdec: usize, ydec: usize,
  ) -> Result<BlockSize, InvalidBlockSize> {
    Ok(match (xdec, ydec) {
      (0, 0) /* 4:4:4 */ => self,
      (1, 0) /* 4:2:2 */ => match self {
        BLOCK_4X4 | BLOCK_8X4 => BLOCK_4X4,
        BLOCK_8X8 => BLOCK_4X8,
        BLOCK_16X4 => BLOCK_8X4,
        BLOCK_16X8 => BLOCK_8X8,
        BLOCK_16X16 => BLOCK_8X16,
        BLOCK_32X8 => BLOCK_16X8,
        BLOCK_32X16 => BLOCK_16X16,
        BLOCK_32X32 => BLOCK_16X32,
        BLOCK_64X16 => BLOCK_32X16,
        BLOCK_64X32 => BLOCK_32X32,
        BLOCK_64X64 => BLOCK_32X64,
        BLOCK_128X64 => BLOCK_64X64,
        BLOCK_128X128 => BLOCK_64X128,
        _ => return Err(InvalidBlockSize),
      },
      (1, 1) /* 4:2:0 */ => match self {
        BLOCK_4X4 | BLOCK_4X8 | BLOCK_8X4 | BLOCK_8X8 => BLOCK_4X4,
        BLOCK_4X16 | BLOCK_8X16 => BLOCK_4X8,
        BLOCK_8X32 => BLOCK_4X16,
        BLOCK_16X4 | BLOCK_16X8 => BLOCK_8X4,
        BLOCK_16X16 => BLOCK_8X8,
        BLOCK_16X32 => BLOCK_8X16,
        BLOCK_16X64 => BLOCK_8X32,
        BLOCK_32X8 => BLOCK_16X4,
        BLOCK_32X16 => BLOCK_16X8,
        BLOCK_32X32 => BLOCK_16X16,
        BLOCK_32X64 => BLOCK_16X32,
        BLOCK_64X16 => BLOCK_32X8,
        BLOCK_64X32 => BLOCK_32X16,
        BLOCK_64X64 => BLOCK_32X32,
        BLOCK_64X128 => BLOCK_32X64,
        BLOCK_128X64 => BLOCK_64X32,
        BLOCK_128X128 => BLOCK_64X64,
      },
      _ => return Err(InvalidBlockSize),
    })
  }
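
  // For illustration (not part of the original source): with 4:2:0
  // subsampling (xdec = 1, ydec = 1), BLOCK_64X64.subsampled_size(1, 1)
  // yields Ok(BLOCK_32X32), while BLOCK_4X8.subsampled_size(1, 0) is
  // Err(InvalidBlockSize) because a 2x8 chroma block does not exist.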

  /// # Panics
  ///
  /// Will panic if the subsampling is not possible
  #[inline]
  pub fn largest_chroma_tx_size(self, xdec: usize, ydec: usize) -> TxSize {
    let plane_bsize = self
      .subsampled_size(xdec, ydec)
      .expect("invalid block size for this subsampling mode");

    let chroma_tx_size = max_txsize_rect_lookup[plane_bsize as usize];

    av1_get_coded_tx_size(chroma_tx_size)
  }

  #[inline]
  pub const fn is_sqr(self) -> bool {
    self.width_log2() == self.height_log2()
  }

  #[inline]
  pub const fn is_sub8x8(self, xdec: usize, ydec: usize) -> bool {
    xdec != 0 && self.width_log2() == 2 || ydec != 0 && self.height_log2() == 2
  }

  #[inline]
  pub const fn sub8x8_offset(
    self, xdec: usize, ydec: usize,
  ) -> (isize, isize) {
    let offset_x = if xdec != 0 && self.width_log2() == 2 { -1 } else { 0 };
    let offset_y = if ydec != 0 && self.height_log2() == 2 { -1 } else { 0 };

    (offset_x, offset_y)
  }

  /// # Errors
  ///
  /// - Returns `InvalidBlockSize` if the block size cannot be split
  /// in the requested way.
  pub const fn subsize(
    self, partition: PartitionType,
  ) -> Result<BlockSize, InvalidBlockSize> {
    use PartitionType::*;

    Ok(match partition {
      PARTITION_NONE => self,
      PARTITION_SPLIT => match self {
        BLOCK_8X8 => BLOCK_4X4,
        BLOCK_16X16 => BLOCK_8X8,
        BLOCK_32X32 => BLOCK_16X16,
        BLOCK_64X64 => BLOCK_32X32,
        BLOCK_128X128 => BLOCK_64X64,
        _ => return Err(InvalidBlockSize),
      },
      PARTITION_HORZ | PARTITION_HORZ_A | PARTITION_HORZ_B => match self {
        BLOCK_8X8 => BLOCK_8X4,
        BLOCK_16X16 => BLOCK_16X8,
        BLOCK_32X32 => BLOCK_32X16,
        BLOCK_64X64 => BLOCK_64X32,
        BLOCK_128X128 => BLOCK_128X64,
        _ => return Err(InvalidBlockSize),
      },
      PARTITION_VERT | PARTITION_VERT_A | PARTITION_VERT_B => match self {
        BLOCK_8X8 => BLOCK_4X8,
        BLOCK_16X16 => BLOCK_8X16,
        BLOCK_32X32 => BLOCK_16X32,
        BLOCK_64X64 => BLOCK_32X64,
        BLOCK_128X128 => BLOCK_64X128,
        _ => return Err(InvalidBlockSize),
      },
      PARTITION_HORZ_4 => match self {
        BLOCK_16X16 => BLOCK_16X4,
        BLOCK_32X32 => BLOCK_32X8,
        BLOCK_64X64 => BLOCK_64X16,
        _ => return Err(InvalidBlockSize),
      },
      PARTITION_VERT_4 => match self {
        BLOCK_16X16 => BLOCK_4X16,
        BLOCK_32X32 => BLOCK_8X32,
        BLOCK_64X64 => BLOCK_16X64,
        _ => return Err(InvalidBlockSize),
      },
      _ => return Err(InvalidBlockSize),
    })
  }
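
  // For example (illustrative): BLOCK_64X64.subsize(PARTITION_SPLIT) is
  // Ok(BLOCK_32X32) and BLOCK_16X16.subsize(PARTITION_HORZ_4) is
  // Ok(BLOCK_16X4); splitting a non-square block such as BLOCK_16X8
  // returns Err(InvalidBlockSize).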

  pub const fn is_rect_tx_allowed(self) -> bool {
    !matches!(
      self,
      BLOCK_4X4
        | BLOCK_8X8
        | BLOCK_16X16
        | BLOCK_32X32
        | BLOCK_64X64
        | BLOCK_64X128
        | BLOCK_128X64
        | BLOCK_128X128
    )
  }
}

impl fmt::Display for BlockSize {
  fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
    write!(
      f,
      "{}",
      match self {
        BlockSize::BLOCK_4X4 => "4x4",
        BlockSize::BLOCK_4X8 => "4x8",
        BlockSize::BLOCK_8X4 => "8x4",
        BlockSize::BLOCK_8X8 => "8x8",
        BlockSize::BLOCK_8X16 => "8x16",
        BlockSize::BLOCK_16X8 => "16x8",
        BlockSize::BLOCK_16X16 => "16x16",
        BlockSize::BLOCK_16X32 => "16x32",
        BlockSize::BLOCK_32X16 => "32x16",
        BlockSize::BLOCK_32X32 => "32x32",
        BlockSize::BLOCK_32X64 => "32x64",
        BlockSize::BLOCK_64X32 => "64x32",
        BlockSize::BLOCK_64X64 => "64x64",
        BlockSize::BLOCK_64X128 => "64x128",
        BlockSize::BLOCK_128X64 => "128x64",
        BlockSize::BLOCK_128X128 => "128x128",
        BlockSize::BLOCK_4X16 => "4x16",
        BlockSize::BLOCK_16X4 => "16x4",
        BlockSize::BLOCK_8X32 => "8x32",
        BlockSize::BLOCK_32X8 => "32x8",
        BlockSize::BLOCK_16X64 => "16x64",
        BlockSize::BLOCK_64X16 => "64x16",
      }
    )
  }
}

pub const NEWMV_MODE_CONTEXTS: usize = 7;
pub const GLOBALMV_MODE_CONTEXTS: usize = 2;
pub const REFMV_MODE_CONTEXTS: usize = 6;
pub const INTER_COMPOUND_MODES: usize = 8;

pub const REFMV_OFFSET: usize = 4;
pub const GLOBALMV_OFFSET: usize = 3;
pub const NEWMV_CTX_MASK: usize = (1 << GLOBALMV_OFFSET) - 1;
pub const GLOBALMV_CTX_MASK: usize =
  (1 << (REFMV_OFFSET - GLOBALMV_OFFSET)) - 1;
pub const REFMV_CTX_MASK: usize = (1 << (8 - REFMV_OFFSET)) - 1;
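
// Illustrative note (not from the original source): these offsets and masks
// are sized so the per-mode contexts can share a single packed word, e.g. a
// combined mode context `ctx` would unpack as
//   newmv_ctx    = ctx & NEWMV_CTX_MASK
//   globalmv_ctx = (ctx >> GLOBALMV_OFFSET) & GLOBALMV_CTX_MASK
//   refmv_ctx    = (ctx >> REFMV_OFFSET) & REFMV_CTX_MASK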

pub static RAV1E_PARTITION_TYPES: &[PartitionType] = &[
  PartitionType::PARTITION_NONE,
  PartitionType::PARTITION_HORZ,
  PartitionType::PARTITION_VERT,
  PartitionType::PARTITION_SPLIT,
];

#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd)]
pub enum GlobalMVMode {
  IDENTITY = 0, // identity transformation, 0-parameter
  TRANSLATION = 1, // translational motion 2-parameter
  ROTZOOM = 2, // simplified affine with rotation + zoom only, 4-parameter
  AFFINE = 3, // affine, 6-parameter
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd)]
pub enum MvSubpelPrecision {
  MV_SUBPEL_NONE = -1,
  MV_SUBPEL_LOW_PRECISION = 0,
  MV_SUBPEL_HIGH_PRECISION,
}

/* Symbols for coding which components are zero jointly */
pub const MV_JOINTS: usize = 4;

#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd)]
pub enum MvJointType {
  MV_JOINT_ZERO = 0,   /* Zero vector */
  MV_JOINT_HNZVZ = 1,  /* Vert zero, hor nonzero */
  MV_JOINT_HZVNZ = 2,  /* Hor zero, vert nonzero */
  MV_JOINT_HNZVNZ = 3, /* Both components nonzero */
}

fn supersample_chroma_bsize(
  bsize: BlockSize, ss_x: usize, ss_y: usize,
) -> BlockSize {
  debug_assert!(ss_x < 2);
  debug_assert!(ss_y < 2);

  match bsize {
    BLOCK_4X4 => match (ss_x, ss_y) {
      (1, 1) => BLOCK_8X8,
      (1, 0) => BLOCK_8X4,
      (0, 1) => BLOCK_4X8,
      _ => bsize,
    },
    BLOCK_4X8 => match (ss_x, ss_y) {
      (1, 1) => BLOCK_8X8,
      (1, 0) => BLOCK_8X8,
      (0, 1) => BLOCK_4X8,
      _ => bsize,
    },
    BLOCK_8X4 => match (ss_x, ss_y) {
      (1, 1) => BLOCK_8X8,
      (1, 0) => BLOCK_8X4,
      (0, 1) => BLOCK_8X8,
      _ => bsize,
    },
    BLOCK_4X16 => match (ss_x, ss_y) {
      (1, 1) => BLOCK_8X16,
      (1, 0) => BLOCK_8X16,
      (0, 1) => BLOCK_4X16,
      _ => bsize,
    },
    BLOCK_16X4 => match (ss_x, ss_y) {
      (1, 1) => BLOCK_16X8,
      (1, 0) => BLOCK_16X4,
      (0, 1) => BLOCK_16X8,
      _ => bsize,
    },
    _ => bsize,
  }
}
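
// For example (illustrative): with 4:2:0 subsampling (ss_x = 1, ss_y = 1) a
// chroma BLOCK_4X4 covers the same area as a luma BLOCK_8X8, so
// supersample_chroma_bsize(BLOCK_4X4, 1, 1) returns BLOCK_8X8; block sizes
// of 8x8 and larger are returned unchanged.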

type IntraEdgeBuffer<T> = Aligned<[MaybeUninit<T>; 4 * MAX_TX_SIZE + 1]>;

#[cfg(any(test, feature = "bench"))]
type IntraEdgeMock<T> = Aligned<[T; 4 * MAX_TX_SIZE + 1]>;

pub struct IntraEdge<'a, T: Pixel>(&'a [T], &'a [T], &'a [T]);

impl<'a, T: Pixel> IntraEdge<'a, T> {
  fn new(
    edge_buf: &'a mut IntraEdgeBuffer<T>, init_left: usize, init_above: usize,
  ) -> Self {
    // SAFETY: Initialized in `get_intra_edges`.
    let left = unsafe {
      let begin_left = 2 * MAX_TX_SIZE - init_left;
      let end_above = 2 * MAX_TX_SIZE + 1 + init_above;
      slice_assume_init_mut(&mut edge_buf.data[begin_left..end_above])
    };
    let (left, top_left) = left.split_at(init_left);
    let (top_left, above) = top_left.split_at(1);
    Self(left, top_left, above)
  }

  pub const fn as_slices(&self) -> (&'a [T], &'a [T], &'a [T]) {
    (self.0, self.1, self.2)
  }

  pub const fn top_left_ptr(&self) -> *const T {
    self.1.as_ptr()
  }

  #[cfg(any(test, feature = "bench"))]
  pub fn mock(edge_buf: &'a IntraEdgeMock<T>) -> Self {
    let left = &edge_buf.data[..];
    let (left, top_left) = left.split_at(2 * MAX_TX_SIZE);
    let (top_left, above) = top_left.split_at(1);
    Self(left, top_left, above)
  }
}

pub fn get_intra_edges<'a, T: Pixel>(
  edge_buf: &'a mut IntraEdgeBuffer<T>,
  dst: &PlaneRegion<'_, T>,
  partition_bo: TileBlockOffset, // partition bo, BlockOffset
  bx: usize,
  by: usize,
  partition_size: BlockSize, // partition size, BlockSize
  po: PlaneOffset,
  tx_size: TxSize,
  bit_depth: usize,
  opt_mode: Option<PredictionMode>,
  enable_intra_edge_filter: bool,
  intra_param: IntraParam,
) -> IntraEdge<'a, T> {
  let mut init_left: usize = 0;
  let mut init_above: usize = 0;

  let plane_cfg = &dst.plane_cfg;

  let base = 128u16 << (bit_depth - 8);

  {
    // left pixels are ordered from bottom to top and right-aligned
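    // Layout of `edge_buf.data` (descriptive note; mirrors `IntraEdge::new`):
    //   [0 .. 2 * MAX_TX_SIZE)   left edge, stored bottom-to-top, right-aligned
    //   [2 * MAX_TX_SIZE]        the single top-left sample
    //   [2 * MAX_TX_SIZE + 1 ..] the above edge (and top-right), left-to-right
    // Only the last `init_left` left samples and the first `init_above` above
    // samples are written below and later exposed through `IntraEdge::new`.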
    let (left, not_left) = edge_buf.data.split_at_mut(2 * MAX_TX_SIZE);
    let (top_left, above) = not_left.split_at_mut(1);

    let x = po.x as usize;
    let y = po.y as usize;

    let mut needs_left = true;
    let mut needs_topleft = true;
    let mut needs_top = true;
    let mut needs_topright = true;
    let mut needs_bottomleft = true;
    let mut needs_topleft_filter = false;

    if let Some(mut mode) = opt_mode {
      mode = match mode {
        PredictionMode::PAETH_PRED => match (x, y) {
          (0, 0) => PredictionMode::DC_PRED,
          (0, _) => PredictionMode::V_PRED,
          (_, 0) => PredictionMode::H_PRED,
          _ => PredictionMode::PAETH_PRED,
        },
        _ => mode,
      };

      let p_angle = intra_mode_to_angle(mode)
        + match intra_param {
          IntraParam::AngleDelta(val) => (val * ANGLE_STEP) as isize,
          _ => 0,
        };

      let dc_or_cfl =
        mode == PredictionMode::DC_PRED || mode == PredictionMode::UV_CFL_PRED;

      needs_left = (!dc_or_cfl || x != 0) || (p_angle > 90 && p_angle != 180);
      needs_topleft = mode == PredictionMode::PAETH_PRED
        || (mode.is_directional() && p_angle != 90 && p_angle != 180);
      needs_top = (!dc_or_cfl || y != 0) || (p_angle != 90 && p_angle < 180);
      needs_topright = mode.is_directional() && p_angle < 90;
      needs_bottomleft = mode.is_directional() && p_angle > 180;
      needs_topleft_filter =
        enable_intra_edge_filter && p_angle > 90 && p_angle < 180;
    }

    let rect_w =
      dst.rect().width.min(dst.plane_cfg.width - dst.rect().x as usize);
    let rect_h =
      dst.rect().height.min(dst.plane_cfg.height - dst.rect().y as usize);

    // Needs left
    if needs_left {
      let txh = if y + tx_size.height() > rect_h {
        rect_h - y
      } else {
        tx_size.height()
      };
      if x != 0 {
        for i in 0..txh {
          debug_assert!(y + i < rect_h);
          left[2 * MAX_TX_SIZE - 1 - i].write(dst[y + i][x - 1]);
        }
        if txh < tx_size.height() {
          let val = dst[y + txh - 1][x - 1];
          for i in txh..tx_size.height() {
            left[2 * MAX_TX_SIZE - 1 - i].write(val);
          }
        }
      } else {
        let val = if y != 0 { dst[y - 1][0] } else { T::cast_from(base + 1) };
        for v in left[2 * MAX_TX_SIZE - tx_size.height()..].iter_mut() {
          v.write(val);
        }
      }
      init_left += tx_size.height();
    }

    // Needs top
    if needs_top {
      let txw = if x + tx_size.width() > rect_w {
        rect_w - x
      } else {
        tx_size.width()
      };
      if y != 0 {
        above[..txw].copy_from_slice(
          // SAFETY: &[T] and &[MaybeUninit<T>] have the same layout
          unsafe {
            transmute::<&[T], &[MaybeUninit<T>]>(&dst[y - 1][x..x + txw])
          },
        );
        if txw < tx_size.width() {
          let val = dst[y - 1][x + txw - 1];
          for i in txw..tx_size.width() {
            above[i].write(val);
          }
        }
      } else {
        let val = if x != 0 { dst[0][x - 1] } else { T::cast_from(base - 1) };
        for v in above[..tx_size.width()].iter_mut() {
          v.write(val);
        }
      }
      init_above += tx_size.width();
    }

    let bx4 = bx * (tx_size.width() >> MI_SIZE_LOG2); // bx,by are in tx block indices
    let by4 = by * (tx_size.height() >> MI_SIZE_LOG2);

    let have_top = by4 != 0
      || if plane_cfg.ydec != 0 {
        partition_bo.0.y > 1
      } else {
        partition_bo.0.y > 0
      };
    let have_left = bx4 != 0
      || if plane_cfg.xdec != 0 {
        partition_bo.0.x > 1
      } else {
        partition_bo.0.x > 0
      };

    let right_available = x + tx_size.width() < rect_w;
    let bottom_available = y + tx_size.height() < rect_h;

    let scaled_partition_size =
      supersample_chroma_bsize(partition_size, plane_cfg.xdec, plane_cfg.ydec);

    // Needs top right
    if needs_topright {
      debug_assert!(plane_cfg.xdec <= 1 && plane_cfg.ydec <= 1);

      let num_avail = if y != 0
        && has_top_right(
          scaled_partition_size,
          partition_bo,
          have_top,
          right_available,
          tx_size,
          by4,
          bx4,
          plane_cfg.xdec,
          plane_cfg.ydec,
        ) {
        tx_size.width().min(rect_w - x - tx_size.width())
      } else {
        0
      };
      if num_avail > 0 {
        above[tx_size.width()..][..num_avail].copy_from_slice(
          // SAFETY: &[T] and &[MaybeUninit<T>] have the same layout
          unsafe {
            transmute::<&[T], &[MaybeUninit<T>]>(
              &dst[y - 1][x + tx_size.width()..][..num_avail],
            )
          },
        );
      }
      if num_avail < tx_size.height() {
        let val = above[tx_size.width() + num_avail - 1];
        for v in above
          [tx_size.width() + num_avail..tx_size.width() + tx_size.height()]
          .iter_mut()
        {
          *v = val;
        }
      }
      init_above += tx_size.height();
    }

    // SAFETY: The blocks above have initialized the first `init_above` items.
    let above = unsafe { slice_assume_init_mut(&mut above[..init_above]) };

    // Needs bottom left
    if needs_bottomleft {
      debug_assert!(plane_cfg.xdec <= 1 && plane_cfg.ydec <= 1);

      let num_avail = if x != 0
        && has_bottom_left(
          scaled_partition_size,
          partition_bo,
          bottom_available,
          have_left,
          tx_size,
          by4,
          bx4,
          plane_cfg.xdec,
          plane_cfg.ydec,
        ) {
        tx_size.height().min(rect_h - y - tx_size.height())
      } else {
        0
      };
      if num_avail > 0 {
        for i in 0..num_avail {
          left[2 * MAX_TX_SIZE - tx_size.height() - 1 - i]
            .write(dst[y + tx_size.height() + i][x - 1]);
        }
      }
      if num_avail < tx_size.width() {
        let val = left[2 * MAX_TX_SIZE - tx_size.height() - num_avail];
        for v in left[(2 * MAX_TX_SIZE - tx_size.height() - tx_size.width())
          ..(2 * MAX_TX_SIZE - tx_size.height() - num_avail)]
          .iter_mut()
        {
          *v = val;
        }
      }
      init_left += tx_size.width();
    }

    // SAFETY: The blocks above have initialized last `init_left` items.
    let left = unsafe {
      slice_assume_init_mut(&mut left[2 * MAX_TX_SIZE - init_left..])
    };

    // Needs top-left
    if needs_topleft {
      let top_left = top_left[0].write(match (x, y) {
        (0, 0) => T::cast_from(base),
        (_, 0) => dst[0][x - 1],
        (0, _) => dst[y - 1][0],
        _ => dst[y - 1][x - 1],
      });

      let (w, h) = (tx_size.width(), tx_size.height());
      if needs_topleft_filter && w + h >= 24 {
        let (l, a, tl): (u32, u32, u32) =
          (left[left.len() - 1].into(), above[0].into(), (*top_left).into());
        let s = l * 5 + tl * 6 + a * 5;

        *top_left = T::cast_from((s + (1 << 3)) >> 4);
      }
    } else {
      top_left[0].write(T::cast_from(base));
    }
  }
  IntraEdge::new(edge_buf, init_left, init_above)
}

pub fn has_tr(bo: TileBlockOffset, bsize: BlockSize) -> bool {
  let sb_mi_size = BLOCK_64X64.width_mi(); /* Assume 64x64 for now */
  let mask_row = bo.0.y & LOCAL_BLOCK_MASK;
  let mask_col = bo.0.x & LOCAL_BLOCK_MASK;
  let target_n4_w = bsize.width_mi();
  let target_n4_h = bsize.height_mi();

  let mut bs = target_n4_w.max(target_n4_h);

  if bs > BLOCK_64X64.width_mi() {
    return false;
  }

  let mut has_tr = !((mask_row & bs) != 0 && (mask_col & bs) != 0);

  /* TODO: assert it's a power of two */

  while bs < sb_mi_size {
    if (mask_col & bs) != 0 {
      if (mask_col & (2 * bs) != 0) && (mask_row & (2 * bs) != 0) {
        has_tr = false;
        break;
      }
    } else {
      break;
    }
    bs <<= 1;
  }

  /* The left hand of two vertical rectangles always has a top right (as the
   * block above will have been decoded) */
  if (target_n4_w < target_n4_h) && (bo.0.x & target_n4_w) == 0 {
    has_tr = true;
  }

  /* The bottom of two horizontal rectangles never has a top right (as the block
   * to the right won't have been decoded) */
  if (target_n4_w > target_n4_h) && (bo.0.y & target_n4_h) != 0 {
    has_tr = false;
  }

  /* The bottom left square of a Vertical A (in the old format) does
   * not have a top right as it is decoded before the right hand
   * rectangle of the partition */
  /*
  if blk.partition == PartitionType::PARTITION_VERT_A {
    if blk.n4_w == blk.n4_h {
      if (mask_row & bs) != 0 {
        has_tr = false;
      }
    }
  }
  */

  has_tr
}

pub fn has_bl(bo: TileBlockOffset, bsize: BlockSize) -> bool {
  let sb_mi_size = BLOCK_64X64.width_mi(); /* Assume 64x64 for now */
  let mask_row = bo.0.y & LOCAL_BLOCK_MASK;
  let mask_col = bo.0.x & LOCAL_BLOCK_MASK;
  let target_n4_w = bsize.width_mi();
  let target_n4_h = bsize.height_mi();

  let mut bs = target_n4_w.max(target_n4_h);

  if bs > BLOCK_64X64.width_mi() {
    return false;
  }

  let mut has_bl =
    (mask_row & bs) == 0 && (mask_col & bs) == 0 && bs < sb_mi_size;

  /* TODO: assert it's a power of two */

  while 2 * bs < sb_mi_size {
    if (mask_col & bs) == 0 {
      if (mask_col & (2 * bs) == 0) && (mask_row & (2 * bs) == 0) {
        has_bl = true;
        break;
      }
    } else {
      break;
    }
    bs <<= 1;
  }

  /* The right hand of two vertical rectangles never has a bottom left (as the
   * block below won't have been decoded) */
  if (target_n4_w < target_n4_h) && (bo.0.x & target_n4_w) != 0 {
    has_bl = false;
  }

  /* The top of two horizontal rectangles always has a bottom left (as the block
   * to the left will have been decoded) */
  if (target_n4_w > target_n4_h) && (bo.0.y & target_n4_h) == 0 {
    has_bl = true;
  }

  /* The bottom left square of a Vertical A (in the old format) does
   * not have a top right as it is decoded before the right hand
   * rectangle of the partition */
  /*
  if blk.partition == PartitionType::PARTITION_VERT_A {
    if blk.n4_w == blk.n4_h {
      if (mask_row & bs) != 0 {
        has_tr = false;
      }
    }
  }
  */

  has_bl
}

#[cfg(test)]
mod tests {
  use crate::partition::BlockSize::*;
  use crate::partition::{BlockSize, InvalidBlockSize};

  #[test]
  fn from_wh_matches_naive() {
    fn from_wh_opt_naive(
      w: usize, h: usize,
    ) -> Result<BlockSize, InvalidBlockSize> {
      match (w, h) {
        (4, 4) => Ok(BLOCK_4X4),
        (4, 8) => Ok(BLOCK_4X8),
        (8, 4) => Ok(BLOCK_8X4),
        (8, 8) => Ok(BLOCK_8X8),
        (8, 16) => Ok(BLOCK_8X16),
        (16, 8) => Ok(BLOCK_16X8),
        (16, 16) => Ok(BLOCK_16X16),
        (16, 32) => Ok(BLOCK_16X32),
        (32, 16) => Ok(BLOCK_32X16),
        (32, 32) => Ok(BLOCK_32X32),
        (32, 64) => Ok(BLOCK_32X64),
        (64, 32) => Ok(BLOCK_64X32),
        (64, 64) => Ok(BLOCK_64X64),
        (64, 128) => Ok(BLOCK_64X128),
        (128, 64) => Ok(BLOCK_128X64),
        (128, 128) => Ok(BLOCK_128X128),
        (4, 16) => Ok(BLOCK_4X16),
        (16, 4) => Ok(BLOCK_16X4),
        (8, 32) => Ok(BLOCK_8X32),
        (32, 8) => Ok(BLOCK_32X8),
        (16, 64) => Ok(BLOCK_16X64),
        (64, 16) => Ok(BLOCK_64X16),
        _ => Err(InvalidBlockSize),
      }
    }

    for w in 0..256 {
      for h in 0..256 {
        let a = BlockSize::from_width_and_height_opt(w, h);
        let b = from_wh_opt_naive(w, h);

        assert_eq!(a, b);
      }
    }
  }
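
  #[test]
  fn subsampled_size_examples() {
    // Illustrative additions (not from the original test suite): 4:2:0
    // subsampling halves both dimensions (clamped to a minimum of 4), while
    // 4:2:2 halves only the width and rejects blocks it cannot represent.
    assert_eq!(BLOCK_64X64.subsampled_size(1, 1), Ok(BLOCK_32X32));
    assert_eq!(BLOCK_4X8.subsampled_size(1, 1), Ok(BLOCK_4X4));
    assert_eq!(BLOCK_16X8.subsampled_size(1, 0), Ok(BLOCK_8X8));
    assert_eq!(BLOCK_4X8.subsampled_size(1, 0), Err(InvalidBlockSize));
  }

  #[test]
  fn subsize_examples() {
    use crate::partition::PartitionType::*;
    // Illustrative additions (not from the original test suite): square
    // blocks split as expected and 4:1 partitions are only defined for
    // 16x16 through 64x64.
    assert_eq!(BLOCK_64X64.subsize(PARTITION_SPLIT), Ok(BLOCK_32X32));
    assert_eq!(BLOCK_16X16.subsize(PARTITION_HORZ_4), Ok(BLOCK_16X4));
    assert_eq!(BLOCK_8X8.subsize(PARTITION_HORZ_4), Err(InvalidBlockSize));
  }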
}