/*
 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */
12 #include "vp9/common/vp9_onyxc_int.h"
13 #include "vp9/common/vp9_entropymv.h"
/* Saturation value applied to MV counts before probability adaptation
 * (passed to merge_probs / vp9_tree_merge_probs below). */
#define MV_COUNT_SAT 20
/* Maximum per-frame update factor (out of 256) for MV probabilities. */
#define MV_MAX_UPDATE_FACTOR 128

/* Integer pel reference mv threshold for use of high-precision 1/8 mv */
#define COMPANDED_MVREF_THRESH 8
21 const vp9_tree_index vp9_mv_joint_tree[TREE_SIZE(MV_JOINTS)] = {
24 -MV_JOINT_HZVNZ, -MV_JOINT_HNZVNZ
27 const vp9_tree_index vp9_mv_class_tree[TREE_SIZE(MV_CLASSES)] = {
31 -MV_CLASS_2, -MV_CLASS_3,
33 -MV_CLASS_4, -MV_CLASS_5,
36 -MV_CLASS_7, -MV_CLASS_8,
37 -MV_CLASS_9, -MV_CLASS_10,
40 const vp9_tree_index vp9_mv_class0_tree[TREE_SIZE(CLASS0_SIZE)] = {
44 const vp9_tree_index vp9_mv_fp_tree[TREE_SIZE(MV_FP_SIZE)] = {
50 static const nmv_context default_nmv_context = {
53 { /* vert component */ // NOLINT
55 {224, 144, 192, 168, 192, 176, 192, 198, 198, 245}, /* class */
57 {136, 140, 148, 160, 176, 192, 224, 234, 234, 240}, /* bits */
58 {{128, 128, 64}, {96, 112, 64}}, /* class0_fp */
59 {64, 96, 64}, /* fp */
60 160, /* class0_hp bit */
63 { /* hor component */ // NOLINT
65 {216, 128, 176, 160, 176, 176, 192, 198, 198, 208}, /* class */
67 {136, 140, 148, 160, 176, 192, 224, 234, 234, 240}, /* bits */
68 {{128, 128, 64}, {96, 112, 64}}, /* class0_fp */
69 {64, 96, 64}, /* fp */
70 160, /* class0_hp bit */
/* Smallest magnitude belonging to MV class c: class 0 starts at 0, class
 * c > 0 starts at CLASS0_SIZE << (c + 2). The argument is now fully
 * parenthesized: the original `(c + 2)` mis-expanded when called with an
 * expression of lower precedence than `+` (e.g. a bitwise expression). */
#define mv_class_base(c) ((c) ? (CLASS0_SIZE << ((c) + 2)) : 0)
/* Lookup table of floor(log2(i)) for i in [1, 1024]; entry 0 is unused
 * padding. Indexed with (magnitude - 1) >> 3 by vp9_get_mv_class() to map a
 * motion-vector magnitude to its MV class. Restored: the closing brace was
 * missing from this chunk; all 1025 entries were present and are unchanged. */
static const uint8_t log_in_base_2[] = {
  0, 0, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4,
  4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
  5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 6,
  6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
  6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
  6, 6, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
  7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
  7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
  7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
  7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
  7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 8, 8, 8,
  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
  8, 8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
  9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 10
};
124 MV_CLASS_TYPE vp9_get_mv_class(int z, int *offset) {
125 MV_CLASS_TYPE c = MV_CLASS_0;
126 if (z >= CLASS0_SIZE * 4096)
129 c = log_in_base_2[z >> 3];
132 *offset = z - mv_class_base(c);
136 int vp9_use_mv_hp(const MV *ref) {
137 return (abs(ref->row) >> 3) < COMPANDED_MVREF_THRESH &&
138 (abs(ref->col) >> 3) < COMPANDED_MVREF_THRESH;
141 int vp9_get_mv_mag(MV_CLASS_TYPE c, int offset) {
142 return mv_class_base(c) + offset;
145 static void inc_mv_component(int v, nmv_component_counts *comp_counts,
146 int incr, int usehp) {
147 int s, z, c, o, d, e, f;
148 assert(v != 0); /* should not be zero */
150 comp_counts->sign[s] += incr;
151 z = (s ? -v : v) - 1; /* magnitude - 1 */
153 c = vp9_get_mv_class(z, &o);
154 comp_counts->classes[c] += incr;
156 d = (o >> 3); /* int mv data */
157 f = (o >> 1) & 3; /* fractional pel mv data */
158 e = (o & 1); /* high precision mv data */
160 if (c == MV_CLASS_0) {
161 comp_counts->class0[d] += incr;
162 comp_counts->class0_fp[d][f] += incr;
163 comp_counts->class0_hp[e] += usehp * incr;
166 int b = c + CLASS0_BITS - 1; // number of bits
167 for (i = 0; i < b; ++i)
168 comp_counts->bits[i][((d >> i) & 1)] += incr;
169 comp_counts->fp[f] += incr;
170 comp_counts->hp[e] += usehp * incr;
174 void vp9_inc_mv(const MV *mv, nmv_context_counts *counts) {
175 if (counts != NULL) {
176 const MV_JOINT_TYPE j = vp9_get_mv_joint(mv);
179 if (mv_joint_vertical(j)) {
180 inc_mv_component(mv->row, &counts->comps[0], 1, 1);
183 if (mv_joint_horizontal(j)) {
184 inc_mv_component(mv->col, &counts->comps[1], 1, 1);
189 static vp9_prob adapt_prob(vp9_prob prep, const unsigned int ct[2]) {
190 return merge_probs(prep, ct, MV_COUNT_SAT, MV_MAX_UPDATE_FACTOR);
193 static void adapt_probs(const vp9_tree_index *tree, const vp9_prob *pre_probs,
194 const unsigned int *counts, vp9_prob *probs) {
195 vp9_tree_merge_probs(tree, pre_probs, counts, MV_COUNT_SAT,
196 MV_MAX_UPDATE_FACTOR, probs);
199 void vp9_adapt_mv_probs(VP9_COMMON *cm, int allow_hp) {
202 nmv_context *fc = &cm->fc.nmvc;
203 const nmv_context *pre_fc = &cm->frame_contexts[cm->frame_context_idx].nmvc;
204 const nmv_context_counts *counts = &cm->counts.mv;
206 adapt_probs(vp9_mv_joint_tree, pre_fc->joints, counts->joints, fc->joints);
208 for (i = 0; i < 2; ++i) {
209 nmv_component *comp = &fc->comps[i];
210 const nmv_component *pre_comp = &pre_fc->comps[i];
211 const nmv_component_counts *c = &counts->comps[i];
213 comp->sign = adapt_prob(pre_comp->sign, c->sign);
214 adapt_probs(vp9_mv_class_tree, pre_comp->classes, c->classes,
216 adapt_probs(vp9_mv_class0_tree, pre_comp->class0, c->class0, comp->class0);
218 for (j = 0; j < MV_OFFSET_BITS; ++j)
219 comp->bits[j] = adapt_prob(pre_comp->bits[j], c->bits[j]);
221 for (j = 0; j < CLASS0_SIZE; ++j)
222 adapt_probs(vp9_mv_fp_tree, pre_comp->class0_fp[j], c->class0_fp[j],
225 adapt_probs(vp9_mv_fp_tree, pre_comp->fp, c->fp, comp->fp);
228 comp->class0_hp = adapt_prob(pre_comp->class0_hp, c->class0_hp);
229 comp->hp = adapt_prob(pre_comp->hp, c->hp);
234 void vp9_init_mv_probs(VP9_COMMON *cm) {
235 cm->fc.nmvc = default_nmv_context;