OLD | NEW |
---|---|
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <libdrm/drm_fourcc.h> | 5 #include <libdrm/drm_fourcc.h> |
6 #include <linux/videodev2.h> | 6 #include <linux/videodev2.h> |
7 #include <string.h> | 7 #include <string.h> |
8 | 8 |
9 #include "base/numerics/safe_conversions.h" | 9 #include "base/numerics/safe_conversions.h" |
10 #include "build/build_config.h" | 10 #include "build/build_config.h" |
11 #include "media/gpu/generic_v4l2_device.h" | 11 #include "media/gpu/generic_v4l2_device.h" |
12 #if defined(ARCH_CPU_ARMEL) | 12 #if defined(ARCH_CPU_ARMEL) |
13 #include "media/gpu/tegra_v4l2_device.h" | 13 #include "media/gpu/tegra_v4l2_device.h" |
14 #endif | 14 #endif |
15 | 15 |
16 namespace media { | 16 namespace media { |
17 | 17 |
18 V4L2Device::V4L2Device(Type type) : type_(type) {} | 18 V4L2Device::V4L2Device() {} |
19 | 19 |
20 V4L2Device::~V4L2Device() {} | 20 V4L2Device::~V4L2Device() {} |
21 | 21 |
22 // static | 22 // static |
23 scoped_refptr<V4L2Device> V4L2Device::Create(Type type) { | 23 scoped_refptr<V4L2Device> V4L2Device::Create() { |
24 DVLOG(3) << __PRETTY_FUNCTION__; | 24 DVLOG(3) << __PRETTY_FUNCTION__; |
25 | 25 |
26 scoped_refptr<GenericV4L2Device> generic_device(new GenericV4L2Device(type)); | |
27 if (generic_device->Initialize()) | |
28 return generic_device; | |
29 | |
30 #if defined(ARCH_CPU_ARMEL) | 26 #if defined(ARCH_CPU_ARMEL) |
31 scoped_refptr<TegraV4L2Device> tegra_device(new TegraV4L2Device(type)); | 27 scoped_refptr<TegraV4L2Device> tegra_device(new TegraV4L2Device()); |
kcwu 2016/10/07 11:19:04: Change it to scoped_refptr<V4L2Device> in order to …
Pawel Osciak 2016/10/11 06:13:45: Done.
| |
32 if (tegra_device->Initialize()) | 28 if (tegra_device->Initialize()) |
33 return tegra_device; | 29 return tegra_device; |
34 #endif | 30 #endif |
35 | 31 |
36 DVLOG(1) << "Failed to create V4L2Device"; | 32 scoped_refptr<GenericV4L2Device> generic_device(new GenericV4L2Device()); |
kcwu 2016/10/07 11:19:05: Change it to scoped_refptr<V4L2Device> in order to …
Pawel Osciak 2016/10/11 06:13:45: Done.
| |
37 return scoped_refptr<V4L2Device>(); | 33 if (generic_device->Initialize()) |
34 return generic_device; | |
35 | |
36 DVLOG(1) << "Failed to create a V4L2Device"; | |
37 return nullptr; | |
38 } | 38 } |
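kcwu's two comments above ask for the locals to be typed scoped_refptr<V4L2Device> rather than the concrete subclasses, and the change is marked Done in a later patchset that is not shown in this hunk. A minimal sketch of how Create() could look with that suggestion applied (an illustration only, assuming Initialize() remains virtual on V4L2Device; not the actual follow-up patch):

// Sketch only: Create() with the reviewer's scoped_refptr<V4L2Device>
// suggestion applied, so both branches return through the base type.
scoped_refptr<V4L2Device> V4L2Device::Create() {
  DVLOG(3) << __PRETTY_FUNCTION__;

  scoped_refptr<V4L2Device> device;

#if defined(ARCH_CPU_ARMEL)
  device = new TegraV4L2Device();
  if (device->Initialize())
    return device;
#endif

  device = new GenericV4L2Device();
  if (device->Initialize())
    return device;

  DVLOG(1) << "Failed to create a V4L2Device";
  return nullptr;
}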
39 | 39 |
40 // static | 40 // static |
41 VideoPixelFormat V4L2Device::V4L2PixFmtToVideoPixelFormat(uint32_t pix_fmt) { | 41 VideoPixelFormat V4L2Device::V4L2PixFmtToVideoPixelFormat(uint32_t pix_fmt) { |
42 switch (pix_fmt) { | 42 switch (pix_fmt) { |
43 case V4L2_PIX_FMT_NV12: | 43 case V4L2_PIX_FMT_NV12: |
44 case V4L2_PIX_FMT_NV12M: | 44 case V4L2_PIX_FMT_NV12M: |
45 return PIXEL_FORMAT_NV12; | 45 return PIXEL_FORMAT_NV12; |
46 | 46 |
47 case V4L2_PIX_FMT_MT21: | 47 case V4L2_PIX_FMT_MT21: |
48 return PIXEL_FORMAT_MT21; | 48 return PIXEL_FORMAT_MT21; |
49 | 49 |
50 case V4L2_PIX_FMT_YUV420: | 50 case V4L2_PIX_FMT_YUV420: |
51 case V4L2_PIX_FMT_YUV420M: | 51 case V4L2_PIX_FMT_YUV420M: |
52 return PIXEL_FORMAT_I420; | 52 return PIXEL_FORMAT_I420; |
53 | 53 |
54 case V4L2_PIX_FMT_YVU420: | 54 case V4L2_PIX_FMT_YVU420: |
55 return PIXEL_FORMAT_YV12; | 55 return PIXEL_FORMAT_YV12; |
56 | 56 |
57 case V4L2_PIX_FMT_RGB32: | 57 case V4L2_PIX_FMT_RGB32: |
58 return PIXEL_FORMAT_ARGB; | 58 return PIXEL_FORMAT_ARGB; |
59 | 59 |
60 default: | 60 default: |
61 LOG(FATAL) << "Add more cases as needed"; | 61 DVLOG(1) << "Add more cases as needed"; |
kcwu 2016/10/07 11:19:05: How about DLOG(FATAL) ? anyway, up to you.
Pawel Osciak 2016/10/11 06:13:45: Thinking more about this, we should probably log o…
| |
62 return PIXEL_FORMAT_UNKNOWN; | 62 return PIXEL_FORMAT_UNKNOWN; |
63 } | 63 } |
64 } | 64 } |
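For the LOG(FATAL) thread above, the three macros being weighed differ mainly in whether they abort and in which builds they do anything. A rough comparison (base/logging.h is the authority here):

// Rough comparison of the logging alternatives discussed in the review:
LOG(FATAL) << "Add more cases as needed";   // always logs and aborts, in every build
DLOG(FATAL) << "Add more cases as needed";  // logs and aborts in debug builds;
                                            // compiled out of official release builds
DVLOG(1) << "Add more cases as needed";     // verbose-level log (enabled via --v/--vmodule),
                                            // never aborts, so PIXEL_FORMAT_UNKNOWN is
                                            // actually returned to the caller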
65 | 65 |
66 // static | 66 // static |
67 uint32_t V4L2Device::VideoPixelFormatToV4L2PixFmt(VideoPixelFormat format) { | 67 uint32_t V4L2Device::VideoPixelFormatToV4L2PixFmt(VideoPixelFormat format) { |
68 switch (format) { | 68 switch (format) { |
69 case PIXEL_FORMAT_NV12: | 69 case PIXEL_FORMAT_NV12: |
70 return V4L2_PIX_FMT_NV12M; | 70 return V4L2_PIX_FMT_NV12M; |
71 | 71 |
72 case PIXEL_FORMAT_MT21: | 72 case PIXEL_FORMAT_MT21: |
73 return V4L2_PIX_FMT_MT21; | 73 return V4L2_PIX_FMT_MT21; |
74 | 74 |
75 case PIXEL_FORMAT_I420: | 75 case PIXEL_FORMAT_I420: |
76 return V4L2_PIX_FMT_YUV420M; | 76 return V4L2_PIX_FMT_YUV420M; |
77 | 77 |
78 case PIXEL_FORMAT_YV12: | 78 case PIXEL_FORMAT_YV12: |
79 return V4L2_PIX_FMT_YVU420; | 79 return V4L2_PIX_FMT_YVU420; |
80 | 80 |
81 default: | 81 default: |
82 LOG(FATAL) << "Add more cases as needed"; | 82 DVLOG(1) << "Add more cases as needed"; |
83 return 0; | 83 return 0; |
84 } | 84 } |
85 } | 85 } |
86 | 86 |
87 // static | 87 // static |
88 uint32_t V4L2Device::VideoCodecProfileToV4L2PixFmt(VideoCodecProfile profile, | 88 uint32_t V4L2Device::VideoCodecProfileToV4L2PixFmt(VideoCodecProfile profile, |
89 bool slice_based) { | 89 bool slice_based) { |
90 if (profile >= H264PROFILE_MIN && profile <= H264PROFILE_MAX) { | 90 if (profile >= H264PROFILE_MIN && profile <= H264PROFILE_MAX) { |
91 if (slice_based) | 91 if (slice_based) |
92 return V4L2_PIX_FMT_H264_SLICE; | 92 return V4L2_PIX_FMT_H264_SLICE; |
93 else | 93 else |
94 return V4L2_PIX_FMT_H264; | 94 return V4L2_PIX_FMT_H264; |
95 } else if (profile >= VP8PROFILE_MIN && profile <= VP8PROFILE_MAX) { | 95 } else if (profile >= VP8PROFILE_MIN && profile <= VP8PROFILE_MAX) { |
96 if (slice_based) | 96 if (slice_based) |
97 return V4L2_PIX_FMT_VP8_FRAME; | 97 return V4L2_PIX_FMT_VP8_FRAME; |
98 else | 98 else |
99 return V4L2_PIX_FMT_VP8; | 99 return V4L2_PIX_FMT_VP8; |
100 } else if (profile >= VP9PROFILE_MIN && profile <= VP9PROFILE_MAX) { | 100 } else if (profile >= VP9PROFILE_MIN && profile <= VP9PROFILE_MAX) { |
101 if (slice_based) | 101 if (slice_based) |
102 return V4L2_PIX_FMT_VP9_FRAME; | 102 return V4L2_PIX_FMT_VP9_FRAME; |
103 else | 103 else |
104 return V4L2_PIX_FMT_VP9; | 104 return V4L2_PIX_FMT_VP9; |
105 } else { | 105 } else { |
106 LOG(FATAL) << "Add more cases as needed"; | 106 DVLOG(1) << "Add more cases as needed"; |
107 return 0; | 107 return 0; |
108 } | 108 } |
109 } | 109 } |
110 | 110 |
111 // static | 111 // static |
112 std::vector<VideoCodecProfile> V4L2Device::V4L2PixFmtToVideoCodecProfiles( | |
113 uint32_t pix_fmt, | |
114 bool is_encoder) { | |
115 VideoCodecProfile min_profile, max_profile; | |
116 std::vector<VideoCodecProfile> profiles; | |
117 | |
118 switch (pix_fmt) { | |
119 case V4L2_PIX_FMT_H264: | |
120 case V4L2_PIX_FMT_H264_SLICE: | |
121 if (is_encoder) { | |
122 // TODO(posciak): need to query the device for supported H.264 profiles, | |
123 // for now choose Main as a sensible default. | |
124 min_profile = H264PROFILE_MAIN; | |
125 max_profile = H264PROFILE_MAIN; | |
126 } else { | |
127 min_profile = H264PROFILE_MIN; | |
128 max_profile = H264PROFILE_MAX; | |
129 } | |
130 break; | |
131 | |
132 case V4L2_PIX_FMT_VP8: | |
133 case V4L2_PIX_FMT_VP8_FRAME: | |
134 min_profile = VP8PROFILE_MIN; | |
135 max_profile = VP8PROFILE_MAX; | |
136 break; | |
137 | |
138 case V4L2_PIX_FMT_VP9: | |
139 case V4L2_PIX_FMT_VP9_FRAME: | |
140 min_profile = VP9PROFILE_MIN; | |
141 max_profile = VP9PROFILE_MAX; | |
142 break; | |
143 | |
144 default: | |
145 DVLOG(1) << "Unhandled pixelformat " << std::hex << "0x" << pix_fmt; | |
kcwu 2016/10/07 11:19:04: "0x" could be merged with previous string literal.
Pawel Osciak 2016/10/11 06:13:45: Yes, I felt this was more readable.
| |
146 return profiles; | |
147 } | |
148 | |
149 for (int profile = min_profile; profile <= max_profile; ++profile) | |
150 profiles.push_back(static_cast<VideoCodecProfile>(profile)); | |
151 | |
152 return profiles; | |
153 } | |
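The "0x" comment above is about the hex prefix in the DVLOG line: kcwu's suggestion merges it into the preceding literal, while this patchset keeps the separate << "0x" that Pawel finds more readable. The suggested (not adopted) form would read:

// kcwu's suggested form, with the literal merged (not adopted in this patchset):
DVLOG(1) << "Unhandled pixelformat 0x" << std::hex << pix_fmt;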
154 | |
155 // static | |
112 uint32_t V4L2Device::V4L2PixFmtToDrmFormat(uint32_t format) { | 156 uint32_t V4L2Device::V4L2PixFmtToDrmFormat(uint32_t format) { |
113 switch (format) { | 157 switch (format) { |
114 case V4L2_PIX_FMT_NV12: | 158 case V4L2_PIX_FMT_NV12: |
115 case V4L2_PIX_FMT_NV12M: | 159 case V4L2_PIX_FMT_NV12M: |
116 return DRM_FORMAT_NV12; | 160 return DRM_FORMAT_NV12; |
117 | 161 |
118 case V4L2_PIX_FMT_YUV420: | 162 case V4L2_PIX_FMT_YUV420: |
119 case V4L2_PIX_FMT_YUV420M: | 163 case V4L2_PIX_FMT_YUV420M: |
120 return DRM_FORMAT_YUV420; | 164 return DRM_FORMAT_YUV420; |
121 | 165 |
122 case V4L2_PIX_FMT_YVU420: | 166 case V4L2_PIX_FMT_YVU420: |
123 return DRM_FORMAT_YVU420; | 167 return DRM_FORMAT_YVU420; |
124 | 168 |
125 case V4L2_PIX_FMT_RGB32: | 169 case V4L2_PIX_FMT_RGB32: |
126 return DRM_FORMAT_ARGB8888; | 170 return DRM_FORMAT_ARGB8888; |
127 | 171 |
172 case V4L2_PIX_FMT_MT21: | |
173 return DRM_FORMAT_MT21; | |
174 | |
128 default: | 175 default: |
129 DVLOG(1) << "Add more cases as needed"; | 176 DVLOG(1) << "Unrecognized format " << std::hex << "0x" << format; |
130 return 0; | 177 return 0; |
131 } | 178 } |
132 } | 179 } |
133 | 180 |
134 // static | 181 // static |
135 gfx::Size V4L2Device::CodedSizeFromV4L2Format(struct v4l2_format format) { | 182 gfx::Size V4L2Device::CodedSizeFromV4L2Format(struct v4l2_format format) { |
136 gfx::Size coded_size; | 183 gfx::Size coded_size; |
137 gfx::Size visible_size; | 184 gfx::Size visible_size; |
138 VideoPixelFormat frame_format = PIXEL_FORMAT_UNKNOWN; | 185 VideoPixelFormat frame_format = PIXEL_FORMAT_UNKNOWN; |
139 size_t bytesperline = 0; | 186 size_t bytesperline = 0; |
(...skipping 112 matching lines...) | |
252 << ", fall back to " << max_resolution->ToString(); | 299 << ", fall back to " << max_resolution->ToString(); |
253 } | 300 } |
254 if (min_resolution->IsEmpty()) { | 301 if (min_resolution->IsEmpty()) { |
255 min_resolution->SetSize(16, 16); | 302 min_resolution->SetSize(16, 16); |
256 LOG(ERROR) << "GetSupportedResolution failed to get minimum resolution for " | 303 LOG(ERROR) << "GetSupportedResolution failed to get minimum resolution for " |
257 << "fourcc " << std::hex << pixelformat | 304 << "fourcc " << std::hex << pixelformat |
258 << ", fall back to " << min_resolution->ToString(); | 305 << ", fall back to " << min_resolution->ToString(); |
259 } | 306 } |
260 } | 307 } |
261 | 308 |
262 VideoDecodeAccelerator::SupportedProfiles | 309 std::vector<uint32_t> V4L2Device::EnumerateSupportedPixelformats( |
263 V4L2Device::GetSupportedDecodeProfiles(const size_t num_formats, | 310 v4l2_buf_type buf_type) { |
264 const uint32_t pixelformats[]) { | 311 std::vector<uint32_t> pixelformats; |
265 DCHECK_EQ(type_, kDecoder); | 312 |
266 VideoDecodeAccelerator::SupportedProfiles profiles; | |
267 VideoDecodeAccelerator::SupportedProfile profile; | |
268 v4l2_fmtdesc fmtdesc; | 313 v4l2_fmtdesc fmtdesc; |
269 memset(&fmtdesc, 0, sizeof(fmtdesc)); | 314 memset(&fmtdesc, 0, sizeof(fmtdesc)); |
270 fmtdesc.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | 315 fmtdesc.type = buf_type; |
271 | 316 |
272 for (; Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0; ++fmtdesc.index) { | 317 for (; Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0; ++fmtdesc.index) { |
273 if (std::find(pixelformats, pixelformats + num_formats, | 318 DVLOG(1) << "Found " << std::hex << fmtdesc.pixelformat << " " |
kcwu 2016/10/07 11:19:04: 0x
Pawel Osciak 2016/10/11 06:13:45: Done.
| |
274 fmtdesc.pixelformat) == pixelformats + num_formats) | 319 << fmtdesc.description; |
320 pixelformats.push_back(fmtdesc.pixelformat); | |
321 } | |
322 | |
323 return pixelformats; | |
324 } | |
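The "0x" / Done exchange above presumably means a later patchset adds the hex prefix to this log line as well; the result would likely read:

// Likely shape of the follow-up change (not shown in this patchset):
DVLOG(1) << "Found 0x" << std::hex << fmtdesc.pixelformat << " "
         << fmtdesc.description;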
325 | |
326 VideoDecodeAccelerator::SupportedProfiles | |
327 V4L2Device::EnumerateSupportedDecodeProfiles(const size_t num_formats, | |
328 const uint32_t pixelformats[]) { | |
329 VideoDecodeAccelerator::SupportedProfiles profiles; | |
330 | |
331 const auto& supported_pixelformats = | |
332 EnumerateSupportedPixelformats(V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE); | |
333 | |
334 for (uint32_t pixelformat : supported_pixelformats) { | |
335 if (std::find(pixelformats, pixelformats + num_formats, pixelformat) == | |
336 pixelformats + num_formats) | |
275 continue; | 337 continue; |
276 int min_profile, max_profile; | 338 |
277 switch (fmtdesc.pixelformat) { | 339 VideoDecodeAccelerator::SupportedProfile profile; |
278 case V4L2_PIX_FMT_H264: | 340 GetSupportedResolution(pixelformat, &profile.min_resolution, |
279 case V4L2_PIX_FMT_H264_SLICE: | |
280 min_profile = H264PROFILE_MIN; | |
281 max_profile = H264PROFILE_MAX; | |
282 break; | |
283 case V4L2_PIX_FMT_VP8: | |
284 case V4L2_PIX_FMT_VP8_FRAME: | |
285 min_profile = VP8PROFILE_MIN; | |
286 max_profile = VP8PROFILE_MAX; | |
287 break; | |
288 case V4L2_PIX_FMT_VP9: | |
289 case V4L2_PIX_FMT_VP9_FRAME: | |
290 min_profile = VP9PROFILE_MIN; | |
291 max_profile = VP9PROFILE_MAX; | |
292 break; | |
293 default: | |
294 NOTREACHED() << "Unhandled pixelformat " << std::hex | |
295 << fmtdesc.pixelformat; | |
296 return profiles; | |
297 } | |
298 GetSupportedResolution(fmtdesc.pixelformat, &profile.min_resolution, | |
299 &profile.max_resolution); | 341 &profile.max_resolution); |
300 for (int media_profile = min_profile; media_profile <= max_profile; | 342 |
301 ++media_profile) { | 343 const auto video_codec_profiles = |
302 profile.profile = static_cast<VideoCodecProfile>(media_profile); | 344 V4L2PixFmtToVideoCodecProfiles(pixelformat, false); |
345 | |
346 for (const auto& video_codec_profile : video_codec_profiles) { | |
347 profile.profile = video_codec_profile; | |
303 profiles.push_back(profile); | 348 profiles.push_back(profile); |
349 | |
350 DVLOG(1) << "Found decoder profile " << GetProfileName(profile.profile) | |
351 << ", resolutions: " << profile.min_resolution.ToString() << " " | |
352 << profile.max_resolution.ToString(); | |
304 } | 353 } |
305 } | 354 } |
355 | |
306 return profiles; | 356 return profiles; |
307 } | 357 } |
308 | 358 |
309 bool V4L2Device::SupportsDecodeProfileForV4L2PixelFormats( | 359 VideoEncodeAccelerator::SupportedProfiles |
310 VideoCodecProfile profile, | 360 V4L2Device::EnumerateSupportedEncodeProfiles() { |
311 const size_t num_formats, | 361 VideoEncodeAccelerator::SupportedProfiles profiles; |
312 const uint32_t pixelformats[]) { | |
313 // Get all supported profiles by this device, taking into account only fourccs | |
314 // in pixelformats. | |
315 const auto supported_profiles = | |
316 GetSupportedDecodeProfiles(num_formats, pixelformats); | |
317 | 362 |
318 // Try to find requested profile among the returned supported_profiles. | 363 const auto& supported_pixelformats = |
319 const auto iter = std::find_if( | 364 EnumerateSupportedPixelformats(V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE); |
320 supported_profiles.begin(), supported_profiles.end(), | |
321 [profile](const VideoDecodeAccelerator::SupportedProfile& p) { | |
322 return profile == p.profile; | |
323 }); | |
324 | 365 |
325 return iter != supported_profiles.end(); | 366 for (const auto& pixelformat : supported_pixelformats) { |
367 VideoEncodeAccelerator::SupportedProfile profile; | |
368 profile.max_framerate_numerator = 30; | |
369 profile.max_framerate_denominator = 1; | |
370 gfx::Size min_resolution; | |
371 GetSupportedResolution(pixelformat, &min_resolution, | |
372 &profile.max_resolution); | |
373 | |
374 const auto video_codec_profiles = | |
375 V4L2PixFmtToVideoCodecProfiles(pixelformat, true); | |
376 | |
377 for (const auto& video_codec_profile : video_codec_profiles) { | |
378 profile.profile = video_codec_profile; | |
379 profiles.push_back(profile); | |
380 | |
381 DVLOG(1) << "Found encoder profile " << GetProfileName(profile.profile) | |
382 << ", max resolution: " << profile.max_resolution.ToString(); | |
383 } | |
384 } | |
385 | |
386 return profiles; | |
326 } | 387 } |
327 | 388 |
328 } // namespace media | 389 } // namespace media |
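As a usage-level sketch of the refactored API (hypothetical caller code with a made-up function name and an illustrative fourcc list; not part of this CL), a decoder-side client would create a device and then enumerate what it supports:

// Hypothetical caller showing how the new enumeration helpers compose.
void LogSupportedDecodeProfiles() {
  scoped_refptr<V4L2Device> device = V4L2Device::Create();
  if (!device) {
    DVLOG(1) << "No usable V4L2 device";
    return;
  }

  static const uint32_t kSupportedInputFourccs[] = {
      V4L2_PIX_FMT_H264, V4L2_PIX_FMT_VP8, V4L2_PIX_FMT_VP9,
  };

  const VideoDecodeAccelerator::SupportedProfiles profiles =
      device->EnumerateSupportedDecodeProfiles(
          arraysize(kSupportedInputFourccs), kSupportedInputFourccs);

  for (const auto& p : profiles) {
    DVLOG(1) << GetProfileName(p.profile) << ": "
             << p.min_resolution.ToString() << " to "
             << p.max_resolution.ToString();
  }
}

EnumerateSupportedEncodeProfiles() takes no fourcc list and is used the same way on the encoder side.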