Chromium Code Reviews

Diff: media/base/video_frame.cc

Issue 11269017: Plumb through cropped output size for VideoFrame (Closed) Base URL: https://git.chromium.org/git/chromium/src@git-svn
Patch Set: Found the Windows failure and fixed it. Thanks akalin@ (created 8 years, 1 month ago)
 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "media/base/video_frame.h"

 #include "base/logging.h"
 #include "base/string_piece.h"
 #include "media/base/limits.h"
 #include "media/base/video_util.h"
 #if !defined(OS_ANDROID)
 #include "media/ffmpeg/ffmpeg_common.h"
 #endif

 #include <algorithm>

 namespace media {

 // static
 scoped_refptr<VideoFrame> VideoFrame::CreateFrame(
     VideoFrame::Format format,
-    const gfx::Size& data_size,
+    const gfx::Size& coded_size,
+    const gfx::Rect& visible_rect,
     const gfx::Size& natural_size,
     base::TimeDelta timestamp) {
-  DCHECK(IsValidConfig(format, data_size, natural_size));
+  DCHECK(IsValidConfig(format, coded_size, visible_rect, natural_size));
   scoped_refptr<VideoFrame> frame(new VideoFrame(
-      format, data_size, natural_size, timestamp));
+      format, coded_size, visible_rect, natural_size, timestamp));
   switch (format) {
     case VideoFrame::RGB32:
       frame->AllocateRGB(4u);
       break;
     case VideoFrame::YV12:
     case VideoFrame::YV16:
       frame->AllocateYUV();
       break;
     default:
       LOG(FATAL) << "Unsupported frame format: " << format;
   }
   return frame;
 }

 // static
 bool VideoFrame::IsValidConfig(VideoFrame::Format format,
-                               const gfx::Size& data_size,
+                               const gfx::Size& coded_size,
+                               const gfx::Rect& visible_rect,
                                const gfx::Size& natural_size) {
   return (format != VideoFrame::INVALID &&
-          data_size.width() > 0 && data_size.height() > 0 &&
-          data_size.width() <= limits::kMaxDimension &&
-          data_size.height() <= limits::kMaxDimension &&
-          data_size.width() * data_size.height() <= limits::kMaxCanvas &&
-          natural_size.width() > 0 && natural_size.height() > 0 &&
+          !coded_size.IsEmpty() &&
+          coded_size.GetArea() <= limits::kMaxCanvas &&
+          coded_size.width() <= limits::kMaxDimension &&
+          coded_size.height() <= limits::kMaxDimension &&
+          !visible_rect.IsEmpty() &&
+          visible_rect.x() >= 0 && visible_rect.y() >= 0 &&
+          visible_rect.right() <= coded_size.width() &&
+          visible_rect.bottom() <= coded_size.height() &&
+          !natural_size.IsEmpty() &&
+          natural_size.GetArea() <= limits::kMaxCanvas &&
           natural_size.width() <= limits::kMaxDimension &&
-          natural_size.height() <= limits::kMaxDimension &&
-          natural_size.width() * natural_size.height() <= limits::kMaxCanvas);
+          natural_size.height() <= limits::kMaxDimension);
 }

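For context, a minimal sketch of how a caller might construct a frame with the new signature. This is not part of the patch; the concrete sizes, timestamp, and include paths are illustrative. A 1080p H.264 stream typically decodes into 1920x1088 buffers (coded_size), of which only the top 1920x1080 region is real picture (visible_rect), displayed at 1920x1080 (natural_size):

    #include "base/time.h"
    #include "media/base/video_frame.h"
    #include "ui/gfx/rect.h"
    #include "ui/gfx/size.h"

    scoped_refptr<media::VideoFrame> frame = media::VideoFrame::CreateFrame(
        media::VideoFrame::YV12,
        gfx::Size(1920, 1088),        // coded_size: what the decoder allocates.
        gfx::Rect(0, 0, 1920, 1080),  // visible_rect: must lie inside coded_size.
        gfx::Size(1920, 1080),        // natural_size: size after aspect-ratio scaling.
        base::TimeDelta());
    // IsValidConfig() accepts this because the visible rect is non-empty and
    // contained within the coded size; an empty or out-of-bounds rect would
    // trip the DCHECK in CreateFrame().
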
 // static
 scoped_refptr<VideoFrame> VideoFrame::WrapNativeTexture(
     uint32 texture_id,
     uint32 texture_target,
-    const gfx::Size& data_size,
+    const gfx::Size& coded_size,
+    const gfx::Rect& visible_rect,
     const gfx::Size& natural_size,
     base::TimeDelta timestamp,
     const ReadPixelsCB& read_pixels_cb,
     const base::Closure& no_longer_needed) {
   scoped_refptr<VideoFrame> frame(
-      new VideoFrame(NATIVE_TEXTURE, data_size, natural_size, timestamp));
+      new VideoFrame(NATIVE_TEXTURE, coded_size, visible_rect, natural_size,
+                     timestamp));
   frame->texture_id_ = texture_id;
   frame->texture_target_ = texture_target;
   frame->read_pixels_cb_ = read_pixels_cb;
   frame->texture_no_longer_needed_ = no_longer_needed;
   return frame;
 }

 void VideoFrame::ReadPixelsFromNativeTexture(void* pixels) {
   DCHECK_EQ(format_, NATIVE_TEXTURE);
   if (!read_pixels_cb_.is_null())
     read_pixels_cb_.Run(pixels);
 }

 // static
 scoped_refptr<VideoFrame> VideoFrame::CreateEmptyFrame() {
   return new VideoFrame(
-      VideoFrame::EMPTY, gfx::Size(), gfx::Size(), base::TimeDelta());
+      VideoFrame::EMPTY, gfx::Size(), gfx::Rect(), gfx::Size(),
+      base::TimeDelta());
 }

 // static
 scoped_refptr<VideoFrame> VideoFrame::CreateColorFrame(
     const gfx::Size& size,
     uint8 y, uint8 u, uint8 v,
     base::TimeDelta timestamp) {
-  DCHECK(IsValidConfig(VideoFrame::YV12, size, size));
+  DCHECK(IsValidConfig(VideoFrame::YV12, size, gfx::Rect(size), size));
   scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame(
-      VideoFrame::YV12, size, size, timestamp);
+      VideoFrame::YV12, size, gfx::Rect(size), size, timestamp);
   FillYUV(frame, y, u, v);
   return frame;
 }

 // static
 scoped_refptr<VideoFrame> VideoFrame::CreateBlackFrame(const gfx::Size& size) {
   const uint8 kBlackY = 0x00;
   const uint8 kBlackUV = 0x80;
   const base::TimeDelta kZero;
   return CreateColorFrame(size, kBlackY, kBlackUV, kBlackUV, kZero);
 }

 static inline size_t RoundUp(size_t value, size_t alignment) {
   // Check that |alignment| is a power of 2.
   DCHECK((alignment + (alignment - 1)) == (alignment | (alignment - 1)));
   return ((value + (alignment - 1)) & ~(alignment-1));
 }

 static const int kFrameSizeAlignment = 16;
 // Allows faster SIMD YUV convert. Also, FFmpeg overreads/-writes occasionally.
 static const int kFramePadBytes = 15;

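A small standalone sketch, not part of this patch, of what RoundUp() and kFrameSizeAlignment do to a typical 1080p dimension (the helper is re-declared here only so the example compiles on its own):

    #include <cassert>
    #include <cstddef>

    // Same bit trick as RoundUp() above; valid only for power-of-two
    // alignments, which is exactly what the DCHECK enforces:
    // (alignment + (alignment - 1)) == (alignment | (alignment - 1)) holds
    // precisely when (alignment & (alignment - 1)) == 0.
    static size_t RoundUpExample(size_t value, size_t alignment) {
      return (value + (alignment - 1)) & ~(alignment - 1);
    }

    int main() {
      assert(RoundUpExample(1080, 16) == 1088);      // Pad rows to the 16-pixel grid.
      assert(RoundUpExample(1088, 16) == 1088);      // Aligned values are unchanged.
      assert(RoundUpExample(1080, 16 * 2) == 1088);  // The "* 2" used for y_height below.
      return 0;
    }
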
 void VideoFrame::AllocateRGB(size_t bytes_per_pixel) {
   // Round up to align at least at a 16-byte boundary for each row.
   // This is sufficient for MMX and SSE2 reads (movq/movdqa).
-  size_t bytes_per_row = RoundUp(data_size_.width(),
+  size_t bytes_per_row = RoundUp(coded_size_.width(),
                                  kFrameSizeAlignment) * bytes_per_pixel;
-  size_t aligned_height = RoundUp(data_size_.height(), kFrameSizeAlignment);
+  size_t aligned_height = RoundUp(coded_size_.height(), kFrameSizeAlignment);
   strides_[VideoFrame::kRGBPlane] = bytes_per_row;
 #if !defined(OS_ANDROID)
   // TODO(dalecurtis): use DataAligned or so, so this #ifdef hackery
   // doesn't need to be repeated in every single user of aligned data.
   data_[VideoFrame::kRGBPlane] = reinterpret_cast<uint8*>(
       av_malloc(bytes_per_row * aligned_height + kFramePadBytes));
 #else
   data_[VideoFrame::kRGBPlane] = new uint8_t[bytes_per_row * aligned_height];
 #endif
   DCHECK(!(reinterpret_cast<intptr_t>(data_[VideoFrame::kRGBPlane]) & 7));
(...skipping 11 matching lines...)
   // number to avoid any potential of faulting by code that attempts to access
   // the Y values of the final row, but assumes that the last row of U & V
   // applies to a full two rows of Y.
   size_t y_stride = RoundUp(row_bytes(VideoFrame::kYPlane),
                             kFrameSizeAlignment);
   size_t uv_stride = RoundUp(row_bytes(VideoFrame::kUPlane),
                              kFrameSizeAlignment);
   // The *2 here is because some formats (e.g. h264) allow interlaced coding,
   // and then the size needs to be a multiple of two macroblocks (vertically).
   // See libavcodec/utils.c:avcodec_align_dimensions2().
-  size_t y_height = RoundUp(data_size_.height(), kFrameSizeAlignment * 2);
+  size_t y_height = RoundUp(coded_size_.height(), kFrameSizeAlignment * 2);
   size_t uv_height = format_ == VideoFrame::YV12 ? y_height / 2 : y_height;
   size_t y_bytes = y_height * y_stride;
   size_t uv_bytes = uv_height * uv_stride;

 #if !defined(OS_ANDROID)
   // TODO(dalecurtis): use DataAligned or so, so this #ifdef hackery
   // doesn't need to be repeated in every single user of aligned data.
   // The extra line of UV being allocated is because h264 chroma MC
   // overreads by one line in some cases, see libavcodec/utils.c:
   // avcodec_align_dimensions2() and libavcodec/x86/h264_chromamc.asm:
   // put_h264_chroma_mc4_ssse3().
   uint8* data = reinterpret_cast<uint8*>(
       av_malloc(y_bytes + (uv_bytes * 2 + uv_stride) + kFramePadBytes));
 #else
   uint8* data = new uint8_t[y_bytes + (uv_bytes * 2)];
 #endif
   COMPILE_ASSERT(0 == VideoFrame::kYPlane, y_plane_data_must_be_index_0);
   data_[VideoFrame::kYPlane] = data;
   data_[VideoFrame::kUPlane] = data + y_bytes;
   data_[VideoFrame::kVPlane] = data + y_bytes + uv_bytes;
   strides_[VideoFrame::kYPlane] = y_stride;
   strides_[VideoFrame::kUPlane] = uv_stride;
   strides_[VideoFrame::kVPlane] = uv_stride;
 }

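To make the stride and height arithmetic above concrete, here is a standalone sketch, not part of this patch, of the numbers AllocateYUV() would compute for a hypothetical YV12 frame with a 1920x1088 coded size:

    #include <cstddef>
    #include <cstdio>

    static size_t RoundUpExample(size_t value, size_t alignment) {
      return (value + (alignment - 1)) & ~(alignment - 1);
    }

    int main() {
      const size_t kAlign = 16;  // kFrameSizeAlignment
      const size_t kPad = 15;    // kFramePadBytes
      size_t y_stride  = RoundUpExample(1920, kAlign);      // 1920
      size_t uv_stride = RoundUpExample(1920 / 2, kAlign);  // 960
      size_t y_height  = RoundUpExample(1088, kAlign * 2);  // 1088, already a multiple of 32
      size_t uv_height = y_height / 2;                      // 544 (YV12 halves chroma rows)
      size_t y_bytes   = y_height * y_stride;               // 2,088,960
      size_t uv_bytes  = uv_height * uv_stride;             // 522,240
      // One Y plane, two chroma planes, one spare chroma row for the h264
      // chroma MC overread, plus the FFmpeg padding bytes.
      size_t total = y_bytes + (uv_bytes * 2 + uv_stride) + kPad;
      std::printf("total allocation: %zu bytes\n", total);  // 3,134,415
      return 0;
    }
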
 VideoFrame::VideoFrame(VideoFrame::Format format,
-                       const gfx::Size& data_size,
+                       const gfx::Size& coded_size,
+                       const gfx::Rect& visible_rect,
                        const gfx::Size& natural_size,
                        base::TimeDelta timestamp)
     : format_(format),
-      data_size_(data_size),
+      coded_size_(coded_size),
+      visible_rect_(visible_rect),
       natural_size_(natural_size),
       texture_id_(0),
       texture_target_(0),
       timestamp_(timestamp) {
   memset(&strides_, 0, sizeof(strides_));
   memset(&data_, 0, sizeof(data_));
 }

 VideoFrame::~VideoFrame() {
   if (format_ == NATIVE_TEXTURE && !texture_no_longer_needed_.is_null()) {
(...skipping 35 matching lines...)
   return false;
 }

 int VideoFrame::stride(size_t plane) const {
   DCHECK(IsValidPlane(plane));
   return strides_[plane];
 }

 int VideoFrame::row_bytes(size_t plane) const {
   DCHECK(IsValidPlane(plane));
-  int width = data_size_.width();
+  int width = coded_size_.width();
   switch (format_) {
     // 32bpp.
     case RGB32:
       return width * 4;

     // Planar, 8bpp.
     case YV12:
     case YV16:
       if (plane == kYPlane)
         return width;
       return RoundUp(width, 2) / 2;

     default:
       break;
   }

   // Intentionally leave out non-production formats.
   NOTREACHED() << "Unsupported video frame format: " << format_;
   return 0;
 }

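Concretely, and again only as an illustration: for the same hypothetical 1920x1088 coded size, row_bytes()/rows() describe a 1920x1088 Y plane with 960x544 U and V planes under YV12 (chroma subsampled 2x2 by the rows() function below), 960x1088 U and V planes under YV16 (subsampled horizontally only), and a single RGB32 plane of 7680 bytes per row (1920 * 4) by 1088 rows.
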
 int VideoFrame::rows(size_t plane) const {
   DCHECK(IsValidPlane(plane));
-  int height = data_size_.height();
+  int height = coded_size_.height();
   switch (format_) {
     case RGB32:
     case YV16:
       return height;

     case YV12:
       if (plane == kYPlane)
         return height;
       return RoundUp(height, 2) / 2;

(...skipping 31 matching lines...)
       break;
     for(int row = 0; row < rows(plane); row++) {
       base::MD5Update(context, base::StringPiece(
           reinterpret_cast<char*>(data(plane) + stride(plane) * row),
           row_bytes(plane)));
     }
   }
 }

 }  // namespace media
