OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved. | 2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 264 matching lines...)
275 int last_frame_percent_intra; | 275 int last_frame_percent_intra; |
276 | 276 |
277 int count_mb_ref_frame_usage[MAX_REF_FRAMES]; | 277 int count_mb_ref_frame_usage[MAX_REF_FRAMES]; |
278 | 278 |
279 } LAYER_CONTEXT; | 279 } LAYER_CONTEXT; |
280 | 280 |
281 typedef struct VP8_COMP | 281 typedef struct VP8_COMP |
282 { | 282 { |
283 | 283 |
284 DECLARE_ALIGNED(16, short, Y1quant[QINDEX_RANGE][16]); | 284 DECLARE_ALIGNED(16, short, Y1quant[QINDEX_RANGE][16]); |
285 DECLARE_ALIGNED(16, unsigned char, Y1quant_shift[QINDEX_RANGE][16]); | 285 DECLARE_ALIGNED(16, short, Y1quant_shift[QINDEX_RANGE][16]); |
286 DECLARE_ALIGNED(16, short, Y1zbin[QINDEX_RANGE][16]); | 286 DECLARE_ALIGNED(16, short, Y1zbin[QINDEX_RANGE][16]); |
287 DECLARE_ALIGNED(16, short, Y1round[QINDEX_RANGE][16]); | 287 DECLARE_ALIGNED(16, short, Y1round[QINDEX_RANGE][16]); |
288 | 288 |
289 DECLARE_ALIGNED(16, short, Y2quant[QINDEX_RANGE][16]); | 289 DECLARE_ALIGNED(16, short, Y2quant[QINDEX_RANGE][16]); |
290 DECLARE_ALIGNED(16, unsigned char, Y2quant_shift[QINDEX_RANGE][16]); | 290 DECLARE_ALIGNED(16, short, Y2quant_shift[QINDEX_RANGE][16]); |
291 DECLARE_ALIGNED(16, short, Y2zbin[QINDEX_RANGE][16]); | 291 DECLARE_ALIGNED(16, short, Y2zbin[QINDEX_RANGE][16]); |
292 DECLARE_ALIGNED(16, short, Y2round[QINDEX_RANGE][16]); | 292 DECLARE_ALIGNED(16, short, Y2round[QINDEX_RANGE][16]); |
293 | 293 |
294 DECLARE_ALIGNED(16, short, UVquant[QINDEX_RANGE][16]); | 294 DECLARE_ALIGNED(16, short, UVquant[QINDEX_RANGE][16]); |
295 DECLARE_ALIGNED(16, unsigned char, UVquant_shift[QINDEX_RANGE][16]); | 295 DECLARE_ALIGNED(16, short, UVquant_shift[QINDEX_RANGE][16]); |
296 DECLARE_ALIGNED(16, short, UVzbin[QINDEX_RANGE][16]); | 296 DECLARE_ALIGNED(16, short, UVzbin[QINDEX_RANGE][16]); |
297 DECLARE_ALIGNED(16, short, UVround[QINDEX_RANGE][16]); | 297 DECLARE_ALIGNED(16, short, UVround[QINDEX_RANGE][16]); |
298 | 298 |
299 DECLARE_ALIGNED(16, short, zrun_zbin_boost_y1[QINDEX_RANGE][16]); | 299 DECLARE_ALIGNED(16, short, zrun_zbin_boost_y1[QINDEX_RANGE][16]); |
300 DECLARE_ALIGNED(16, short, zrun_zbin_boost_y2[QINDEX_RANGE][16]); | 300 DECLARE_ALIGNED(16, short, zrun_zbin_boost_y2[QINDEX_RANGE][16]); |
301 DECLARE_ALIGNED(16, short, zrun_zbin_boost_uv[QINDEX_RANGE][16]); | 301 DECLARE_ALIGNED(16, short, zrun_zbin_boost_uv[QINDEX_RANGE][16]); |
302 DECLARE_ALIGNED(16, short, Y1quant_fast[QINDEX_RANGE][16]); | 302 DECLARE_ALIGNED(16, short, Y1quant_fast[QINDEX_RANGE][16]); |
303 DECLARE_ALIGNED(16, short, Y2quant_fast[QINDEX_RANGE][16]); | 303 DECLARE_ALIGNED(16, short, Y2quant_fast[QINDEX_RANGE][16]); |
304 DECLARE_ALIGNED(16, short, UVquant_fast[QINDEX_RANGE][16]); | 304 DECLARE_ALIGNED(16, short, UVquant_fast[QINDEX_RANGE][16]); |
305 | 305 |
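Note on lines 285, 290 and 295: the three quant_shift tables widen from unsigned char to short. The diff does not say why; a plausible reading (an assumption here, not stated in the change) is that every other per-coefficient table in this block is already a 16-byte-aligned array of short, so a uniform 16-bit element type lets SIMD code load all the tables the same way. The sketch below shows how one coefficient flows through these tables, loosely following VP8's regular quantizer; the parameter names are illustrative stand-ins for single table entries.

    /* Sketch of one coefficient's quantization, loosely following
     * vp8_regular_quantize_b(); illustrative only. */
    static int quantize_coeff(int z, int zbin, int round,
                              int quant, int quant_shift)
    {
        int sz = z >> 31;        /* sign mask: 0 or -1             */
        int x  = (z ^ sz) - sz;  /* abs(z)                         */
        int y  = 0;

        if (x >= zbin)           /* dead zone: small coeffs drop   */
        {
            x += round;          /* rounding offset                */
            /* Two-stage fixed-point multiply; quant_shift acts as
             * a second 16-bit factor, consistent with widening it
             * from unsigned char to short. */
            y = ((((x * quant) >> 16) + x) * quant_shift) >> 16;
        }
        return (y ^ sz) - sz;    /* restore the sign               */
    }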
(...skipping 274 matching lines...)
580 int gf_decay_rate; | 580 int gf_decay_rate; |
581 int static_scene_max_gf_interval; | 581 int static_scene_max_gf_interval; |
582 int kf_bits; | 582 int kf_bits; |
583 /* Remaining error from uncoded frames in a gf group. */ | 583 /* Remaining error from uncoded frames in a gf group. */ |
584 int gf_group_error_left; | 584 int gf_group_error_left; |
585 /* Projected total bits available for a key frame group of frames */ | 585 /* Projected total bits available for a key frame group of frames */ |
586 int64_t kf_group_bits; | 586 int64_t kf_group_bits; |
587 /* Error score of frames still to be coded in kf group */ | 587 /* Error score of frames still to be coded in kf group */ |
588 int64_t kf_group_error_left; | 588 int64_t kf_group_error_left; |
589 /* Projected Bits available for a group including 1 GF or ARF */ | 589 /* Projected Bits available for a group including 1 GF or ARF */ |
590 int gf_group_bits; | 590 int64_t gf_group_bits; |
591 /* Bits for the golden frame or ARF */ | 591 /* Bits for the golden frame or ARF */ |
592 int gf_bits; | 592 int gf_bits; |
593 int alt_extra_bits; | 593 int alt_extra_bits; |
594 double est_max_qcorrection_factor; | 594 double est_max_qcorrection_factor; |
595 } twopass; | 595 } twopass; |
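On line 590, gf_group_bits widens from int to int64_t, matching kf_group_bits a few fields up. Back-of-the-envelope arithmetic (illustrative numbers, not from this change) shows why a 32-bit budget is tight once it spans a whole golden-frame group:

    #include <stdint.h>
    #include <stdio.h>
    #include <limits.h>

    int main(void)
    {
        /* Hypothetical stream: 40 Mbit/s at 30 fps with a
         * 1800-frame (60 s) golden-frame group. */
        int64_t bits_per_frame = 40000000 / 30;         /* ~1.33e6 */
        int64_t gf_group_bits  = bits_per_frame * 1800; /* ~2.4e9  */

        /* 2,399,999,400 > INT_MAX (2,147,483,647), so a plain int
         * could wrap; int64_t keeps the type consistent with
         * kf_group_bits above. */
        printf("group bits: %lld (INT_MAX = %d)\n",
               (long long)gf_group_bits, INT_MAX);
        return 0;
    }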
596 | 596 |
597 #if VP8_TEMPORAL_ALT_REF | 597 #if VP8_TEMPORAL_ALT_REF |
598 YV12_BUFFER_CONFIG alt_ref_buffer; | 598 YV12_BUFFER_CONFIG alt_ref_buffer; |
599 YV12_BUFFER_CONFIG *frames[MAX_LAG_BUFFERS]; | 599 YV12_BUFFER_CONFIG *frames[MAX_LAG_BUFFERS]; |
600 int fixed_divide[512]; | 600 int fixed_divide[512]; |
(...skipping 109 matching lines...)
710 } while(0) | 710 } while(0) |
711 #else | 711 #else |
712 #define CHECK_MEM_ERROR(lval,expr) do {\ | 712 #define CHECK_MEM_ERROR(lval,expr) do {\ |
713 lval = (expr); \ | 713 lval = (expr); \ |
714 if(!lval) \ | 714 if(!lval) \ |
715 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,\ | 715 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,\ |
716 "Failed to allocate "#lval);\ | 716 "Failed to allocate "#lval);\ |
717 } while(0) | 717 } while(0) |
718 #endif | 718 #endif |
719 #endif | 719 #endif |
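For context on the CHECK_MEM_ERROR macro above: it assigns the allocation result to the lvalue and reports through vpx_internal_error() on failure, so call sites need no explicit NULL check. A hypothetical call site (the allocation shown is illustrative, not a line from this change):

    /* cpi is the VP8_COMP in scope; on failure this reaches the
     * encoder's error handler with the message
     * "Failed to allocate cpi->tplist". */
    CHECK_MEM_ERROR(cpi->tplist,
                    vpx_calloc(cpi->common.mb_rows, sizeof(TOKENLIST)));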