static void find_ref_mvs(VP9TileData *td,
                         VP9mv *pmv, int ref, int z, int idx, int sb)
{
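    /* Spatial-neighbour candidates to scan for reference MVs: eight
     * (col, row) offsets in 8x8-block units relative to the current
     * block position, one list per block size. */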
    static const int8_t mv_ref_blk_off[N_BS_SIZES][8][2] = {
        [BS_64x64] = { {  3, -1 }, { -1,  3 }, {  4, -1 }, { -1,  4 },
                       { -1, -1 }, {  0, -1 }, { -1,  0 }, {  6, -1 } },
        [BS_64x32] = { {  0, -1 }, { -1,  0 }, {  4, -1 }, { -1,  2 },
                       { -1, -1 }, {  0, -3 }, { -3,  0 }, {  2, -1 } },
        [BS_32x64] = { { -1,  0 }, {  0, -1 }, { -1,  4 }, {  2, -1 },
                       { -1, -1 }, { -3,  0 }, {  0, -3 }, { -1,  2 } },
        [BS_32x32] = { {  1, -1 }, { -1,  1 }, {  2, -1 }, { -1,  2 },
                       { -1, -1 }, {  0, -3 }, { -3,  0 }, { -3, -3 } },
        [BS_32x16] = { {  0, -1 }, { -1,  0 }, {  2, -1 }, { -1, -1 },
                       { -1,  1 }, {  0, -3 }, { -3,  0 }, { -3, -3 } },
        [BS_16x32] = { { -1,  0 }, {  0, -1 }, { -1,  2 }, { -1, -1 },
                       {  1, -1 }, { -3,  0 }, {  0, -3 }, { -3, -3 } },
        [BS_16x16] = { {  0, -1 }, { -1,  0 }, {  1, -1 }, { -1,  1 },
                       { -1, -1 }, {  0, -3 }, { -3,  0 }, { -3, -3 } },
        [BS_16x8]  = { {  0, -1 }, { -1,  0 }, {  1, -1 }, { -1, -1 },
                       {  0, -2 }, { -2,  0 }, { -2, -1 }, { -1, -2 } },
        [BS_8x16]  = { { -1,  0 }, {  0, -1 }, { -1,  1 }, { -1, -1 },
                       { -2,  0 }, {  0, -2 }, { -1, -2 }, { -2, -1 } },
        [BS_8x8]   = { {  0, -1 }, { -1,  0 }, { -1, -1 }, {  0, -2 },
                       { -2,  0 }, { -1, -2 }, { -2, -1 }, { -2, -2 } },
        [BS_8x4]   = { {  0, -1 }, { -1,  0 }, { -1, -1 }, {  0, -2 },
                       { -2,  0 }, { -1, -2 }, { -2, -1 }, { -2, -2 } },
        [BS_4x8]   = { {  0, -1 }, { -1,  0 }, { -1, -1 }, {  0, -2 },
                       { -2,  0 }, { -1, -2 }, { -2, -1 }, { -2, -2 } },
        [BS_4x4]   = { {  0, -1 }, { -1,  0 }, { -1, -1 }, {  0, -2 },
                       { -2,  0 }, { -1, -2 }, { -2, -1 }, { -2, -2 } },
    };
    VP9Context *s = td->s;
    VP9Block *b = td->b;
    int row = td->row, col = td->col, row7 = td->row7;
    const int8_t (*p)[2] = mv_ref_blk_off[b->bs];
#define INVALID_MV 0x80008000U
    uint32_t mem = INVALID_MV, mem_sub8x8 = INVALID_MV;
    int i;
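    /* INVALID_MV acts as a sentinel: the RETURN_* macros below return the
     * first candidate directly when idx == 0; for idx == 1 they remember
     * the first match in mem and keep scanning until a different MV is
     * found. */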
#define RETURN_DIRECT_MV(mv)                    \
    do {                                        \
        uint32_t m = AV_RN32A(&mv);             \
        if (!idx) {                             \
            AV_WN32A(pmv, m);                   \
            return;                             \
        } else if (mem == INVALID_MV) {         \
            mem = m;                            \
        } else if (m != mem) {                  \
            AV_WN32A(pmv, m);                   \
            return;                             \
        }                                       \
    } while (0)
    if (sb >= 0) {
        if (sb == 2 || sb == 1) {
            RETURN_DIRECT_MV(b->mv[0][z]);
        } else if (sb == 3) {
            RETURN_DIRECT_MV(b->mv[2][z]);
            RETURN_DIRECT_MV(b->mv[1][z]);
            RETURN_DIRECT_MV(b->mv[0][z]);
        }
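        /* RETURN_MV also clamps the candidate to the visible area; for
         * sub-8x8 blocks (sb > 0) it additionally skips the candidate
         * already used for the first sub-block, tracked in mem_sub8x8. */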
#define RETURN_MV(mv)                                                  \
    do {                                                               \
        if (sb > 0) {                                                  \
            VP9mv tmp;                                                 \
            uint32_t m;                                                \
            av_assert2(idx == 1);                                      \
            av_assert2(mem != INVALID_MV);                             \
            if (mem_sub8x8 == INVALID_MV) {                            \
                clamp_mv(&tmp, &mv, td);                               \
                m = AV_RN32A(&tmp);                                    \
                if (m != mem) {                                        \
                    AV_WN32A(pmv, m);                                  \
                    return;                                            \
                }                                                      \
                mem_sub8x8 = AV_RN32A(&mv);                            \
            } else if (mem_sub8x8 != AV_RN32A(&mv)) {                  \
                clamp_mv(&tmp, &mv, td);                               \
                m = AV_RN32A(&tmp);                                    \
                if (m != mem) {                                        \
                    AV_WN32A(pmv, m);                                  \
                } else {                                               \
                    /* BUG I'm pretty sure this isn't the intention */ \
                    AV_WN32A(pmv, 0);                                  \
                }                                                      \
                return;                                                \
            }                                                          \
        } else {                                                       \
            uint32_t m = AV_RN32A(&mv);                                \
            if (!idx) {                                                \
                clamp_mv(pmv, &mv, td);                                \
                return;                                                \
            } else if (mem == INVALID_MV) {                            \
                mem = m;                                               \
            } else if (m != mem) {                                     \
                clamp_mv(pmv, &mv, td);                                \
                return;                                                \
            }                                                          \
        }                                                              \
    } while (0)
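        /* Scan the direct above/left neighbours first; their MVs are kept
         * in dedicated per-row and per-tile edge contexts rather than in
         * the frame-wide MV buffer. */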
        if (row > 0) {
            VP9mvrefPair *mv = &s->s.frames[CUR_FRAME].mv[(row - 1) * s->sb_cols * 8 + col];

            if (mv->ref[0] == ref)
                RETURN_MV(s->above_mv_ctx[2 * col + (sb & 1)][0]);
            else if (mv->ref[1] == ref)
                RETURN_MV(s->above_mv_ctx[2 * col + (sb & 1)][1]);
        }
        if (col > td->tile_col_start) {
            VP9mvrefPair *mv = &s->s.frames[CUR_FRAME].mv[row * s->sb_cols * 8 + col - 1];

            if (mv->ref[0] == ref)
                RETURN_MV(td->left_mv_ctx[2 * row7 + (sb >> 1)][0]);
            else if (mv->ref[1] == ref)
                RETURN_MV(td->left_mv_ctx[2 * row7 + (sb >> 1)][1]);
        }
        i = 2;
    } else {
        i = 0;
    }
    // previously coded MVs in this neighbourhood, using same reference frame
    for (; i < 8; i++) {
        int c = p[i][0] + col, r = p[i][1] + row;

        if (c >= td->tile_col_start && c < s->cols &&
            r >= 0 && r < s->rows) {
            VP9mvrefPair *mv = &s->s.frames[CUR_FRAME].mv[r * s->sb_cols * 8 + c];

            if (mv->ref[0] == ref)
                RETURN_MV(mv->mv[0]);
            else if (mv->ref[1] == ref)
                RETURN_MV(mv->mv[1]);
        }
    }
    // MV at this position in previous frame, using same reference frame
    if (s->s.h.use_last_frame_mvs) {
        VP9mvrefPair *mv = &s->s.frames[REF_FRAME_MVPAIR].mv[row * s->sb_cols * 8 + col];

        if (!s->s.frames[REF_FRAME_MVPAIR].uses_2pass)
            ff_thread_await_progress(&s->s.frames[REF_FRAME_MVPAIR].tf, row >> 3, 0);
        if (mv->ref[0] == ref)
            RETURN_MV(mv->mv[0]);
        else if (mv->ref[1] == ref)
            RETURN_MV(mv->mv[1]);
    }
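    /* A candidate that uses a different reference frame can still be
     * reused after compensating for sign bias: if the two references
     * point in opposite temporal directions, the MV is negated. */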
#define RETURN_SCALE_MV(mv, scale)              \
    do {                                        \
        if (scale) {                            \
            VP9mv mv_temp = { -mv.x, -mv.y };   \
            RETURN_MV(mv_temp);                 \
        } else {                                \
            RETURN_MV(mv);                      \
        }                                       \
    } while (0)
    // previously coded MVs in this neighbourhood, using different reference frame
    for (i = 0; i < 8; i++) {
        int c = p[i][0] + col, r = p[i][1] + row;

        if (c >= td->tile_col_start && c < s->cols &&
            r >= 0 && r < s->rows) {
            VP9mvrefPair *mv = &s->s.frames[CUR_FRAME].mv[r * s->sb_cols * 8 + c];

            if (mv->ref[0] != ref && mv->ref[0] >= 0)
                RETURN_SCALE_MV(mv->mv[0],
                                s->s.h.signbias[mv->ref[0]] != s->s.h.signbias[ref]);
            if (mv->ref[1] != ref && mv->ref[1] >= 0 &&
                // BUG - libvpx has this condition regardless of whether
                // we used the first ref MV and pre-scaling
                AV_RN32A(&mv->mv[0]) != AV_RN32A(&mv->mv[1])) {
                RETURN_SCALE_MV(mv->mv[1],
                                s->s.h.signbias[mv->ref[1]] != s->s.h.signbias[ref]);
            }
        }
    }
    // MV at this position in previous frame, using different reference frame
    if (s->s.h.use_last_frame_mvs) {
        VP9mvrefPair *mv = &s->s.frames[REF_FRAME_MVPAIR].mv[row * s->sb_cols * 8 + col];

        // no need to await_progress, because we already did that above
        if (mv->ref[0] != ref && mv->ref[0] >= 0)
            RETURN_SCALE_MV(mv->mv[0],
                            s->s.h.signbias[mv->ref[0]] != s->s.h.signbias[ref]);
        if (mv->ref[1] != ref && mv->ref[1] >= 0 &&
            // BUG - libvpx has this condition regardless of whether
            // we used the first ref MV and pre-scaling
            AV_RN32A(&mv->mv[0]) != AV_RN32A(&mv->mv[1])) {
            RETURN_SCALE_MV(mv->mv[1],
                            s->s.h.signbias[mv->ref[1]] != s->s.h.signbias[ref]);
        }
    }
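    /* No candidate found anywhere: fall back to a zero MV, clamped like
     * any other candidate. */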
    AV_ZERO32(pmv);
    clamp_mv(pmv, pmv, td);
#undef INVALID_MV
#undef RETURN_MV
#undef RETURN_SCALE_MV
}
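/* Reads one MV component (row or column delta). The bitstream codes a
 * sign, a magnitude class, then either class0 (small magnitudes) or
 * per-bit integer magnitude, followed by fractional (fp) and optional
 * high-precision (hp) bits; all symbol counts are accumulated for
 * backward probability adaptation. */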
static av_always_inline int read_mv_component(VP9TileData *td, int idx, int hp)
{
    VP9Context *s = td->s;
    int bit, sign = vp56_rac_get_prob(td->c, s->prob.p.mv_comp[idx].sign);
    int n, c = vp8_rac_get_tree(td->c, ff_vp9_mv_class_tree,
                                s->prob.p.mv_comp[idx].classes);

    td->counts.mv_comp[idx].sign[sign]++;
    td->counts.mv_comp[idx].classes[c]++;
    if (c) {
        int m;

        for (n = 0, m = 0; m < c; m++) {
            bit = vp56_rac_get_prob(td->c, s->prob.p.mv_comp[idx].bits[m]);
            n |= bit << m;
            td->counts.mv_comp[idx].bits[m][bit]++;
        }
        n <<= 3;
        bit = vp8_rac_get_tree(td->c, ff_vp9_mv_fp_tree,
                               s->prob.p.mv_comp[idx].fp);
        n |= bit << 1;
        td->counts.mv_comp[idx].fp[bit]++;
        if (hp) {
            bit = vp56_rac_get_prob(td->c, s->prob.p.mv_comp[idx].hp);
            td->counts.mv_comp[idx].hp[bit]++;
            n |= bit;
        } else {
            n |= 1;
            // bug in libvpx - we count for bw entropy purposes even if the
            // bit wasn't coded
            td->counts.mv_comp[idx].hp[1]++;
        }
        n += 8 << c;
    } else {
        n = vp56_rac_get_prob(td->c, s->prob.p.mv_comp[idx].class0);
        td->counts.mv_comp[idx].class0[n]++;
        bit = vp8_rac_get_tree(td->c, ff_vp9_mv_fp_tree,
                               s->prob.p.mv_comp[idx].class0_fp[n]);
        td->counts.mv_comp[idx].class0_fp[n][bit]++;
        n = (n << 3) | (bit << 1);
        if (hp) {
            bit = vp56_rac_get_prob(td->c, s->prob.p.mv_comp[idx].class0_hp);
            td->counts.mv_comp[idx].class0_hp[bit]++;
            n |= bit;
        } else {
            n |= 1;
            // bug in libvpx - we count for bw entropy purposes even if the
            // bit wasn't coded
            td->counts.mv_comp[idx].class0_hp[1]++;
        }
    }

    return sign ? -(n + 1) : (n + 1);
}
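/* In ff_vp9_fill_mv(), read_mv_component() deltas are only applied for
 * NEWMV blocks; high-precision reads are enabled when the frame header
 * allows them and the predicted MV is small, mirroring libvpx's
 * use_mv_hp(). Otherwise the prediction is rounded to even (non-hp)
 * precision first. */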
        // FIXME maybe move this code into find_ref_mvs()
        if ((mode == NEWMV || sb == -1) &&
            !(hp = s->s.h.highprecisionmvs &&
              abs(mv[0].x) < 64 && abs(mv[0].y) < 64)) {
            if (mv[0].y & 1) {
                if (mv[0].y < 0)
                    mv[0].y++;
                else
                    mv[0].y--;
            }
            if (mv[0].x & 1) {
                if (mv[0].x < 0)
                    mv[0].x++;
                else
                    mv[0].x--;
            }
        }
        if (mode == NEWMV) {
            mv[0].y += read_mv_component(td, 0, hp);
            mv[0].x += read_mv_component(td, 1, hp);
        }

        // the same eligibility test and rounding are repeated for the
        // second reference of compound blocks
        if ((mode == NEWMV || sb == -1) &&
            !(hp = s->s.h.highprecisionmvs &&
              abs(mv[1].x) < 64 && abs(mv[1].y) < 64)) {
            if (mv[1].y & 1) {
                if (mv[1].y < 0)
                    mv[1].y++;
                else
                    mv[1].y--;
            }
            if (mv[1].x & 1) {
                if (mv[1].x < 0)
                    mv[1].x++;
                else
                    mv[1].x--;
            }
        }