OLD | NEW |
(Empty) | |
| 1 /* |
| 2 * Copyright (c) 2007-2008 Intel Corporation. All Rights Reserved. |
| 3 * |
| 4 * Permission is hereby granted, free of charge, to any person obtaining a |
| 5 * copy of this software and associated documentation files (the |
| 6 * "Software"), to deal in the Software without restriction, including |
| 7 * without limitation the rights to use, copy, modify, merge, publish, |
| 8 * distribute, sub license, and/or sell copies of the Software, and to |
| 9 * permit persons to whom the Software is furnished to do so, subject to |
| 10 * the following conditions: |
| 11 * |
| 12 * The above copyright notice and this permission notice (including the |
| 13 * next paragraph) shall be included in all copies or substantial portions |
| 14 * of the Software. |
| 15 * |
| 16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
| 17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
| 18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. |
| 19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR |
| 20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, |
| 21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE |
| 22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
| 23 */ |
| 24 |
| 25 /* |
| 26 * A small example program that shows how VA-API encoding works. |
| 27 * It performs H.264 elementary-stream encoding on auto-generated YUV data. |
| 28 * |
| 29 * gcc -o h264encode h264encode.c -lva -lva-x11 |
| 30 * ./h264encode -w <width> -h <height> -n <frame_num> |
| 31 * |
| 32 */ |
| 33 #include <stdio.h> |
| 34 #include <string.h> |
| 35 #include <stdlib.h> |
| 36 #include <getopt.h> |
| 37 #include <unistd.h> |
| 38 #include <sys/types.h> |
| 39 #include <sys/stat.h> |
| 40 #include <fcntl.h> |
| 41 #include <assert.h> |
| 42 #include <va/va.h> |
| 43 #ifdef ANDROID |
| 44 #include <va/va_android.h> |
| 45 #else |
| 46 #include <va/va_x11.h> |
| 47 #endif |
| 48 |
| 49 #define CHECK_VASTATUS(va_status,func) \ |
| 50 if (va_status != VA_STATUS_SUCCESS) { \ |
| 51 fprintf(stderr,"%s:%s (%d) failed,exit\n", __func__, func, __LINE__); \ |
| 52 exit(1); \ |
| 53 } |
| 54 |
| 55 #include "../loadsurface.h" |
| 56 #define SURFACE_NUM 18 /* 16 surfaces for source frames, 2 surfaces for reconstructed/reference frames */ |
| 57 #define CODEDBUF_NUM 5 |
| 58 static VADisplay va_dpy; |
| 59 static VASurfaceID surface_id[SURFACE_NUM]; |
| 60 static VABufferID coded_buf[CODEDBUF_NUM]; |
| 61 static VAContextID context_id; |
| 62 static Display *x11_display; |
| 63 static int coded_fd; |
| 64 static char coded_file[256]; |
| 65 static int frame_width, frame_height; |
| 66 static int win_width; |
| 67 static int win_height; |
| 68 static int frame_display = 0; /* display the frame during encoding */ |
| 69 static int frame_rate = 30; |
| 70 static int frame_count = 400; |
| 71 static int intra_count = 30; |
| 72 static int frame_bitrate = 8000000; /* 8M */ |
| 73 static int initial_qp = 15; |
| 74 static int minimal_qp = 0; |
| 75 |
| 76 static int display_surface(int frame_id, int *exit_encode); |
| 77 |
| 78 static int upload_source_YUV_once_for_all() |
| 79 { |
| 80 void *surface_p=NULL, *U_start,*V_start; |
| 81 VAStatus va_status; |
| 82 int box_width=8; |
| 83 int row_shift=0; |
| 84 int i; |
| 85 |
| 86 for (i=0; i<SURFACE_NUM-2; i++) { |
| 87 printf("\rLoading data into surface %d.....", i); |
| 88 upload_surface(va_dpy, surface_id[i], box_width, row_shift, 0); |
| 89 |
| 90 row_shift++; |
| 91 if (row_shift==(2*box_width)) row_shift= 0; |
| 92 } |
| 93 printf("\n"); |
| 94 |
| 95 return 0; |
| 96 } |
| 97 |
| 98 |
| 99 static int save_coded_buf(VABufferID coded_buf, int current_frame, int frame_skipped) |
| 100 { |
| 101 void *coded_p=NULL; |
| 102 VACodedBufferSegment *buf_list = NULL; |
| 103 VAStatus va_status; |
| 104 unsigned int coded_size = 0; |
| 105 |
| 106 va_status = vaMapBuffer(va_dpy,coded_buf,(void **)(&buf_list)); |
| 107 CHECK_VASTATUS(va_status,"vaMapBuffer"); |
| 108 while (buf_list != NULL) { |
| 109 printf("Write %d bytes", buf_list->size); |
| 110 coded_size += write(coded_fd, buf_list->buf, buf_list->size); |
| 111 buf_list = (VACodedBufferSegment *) buf_list->next; |
| 112 } |
| 113 vaUnmapBuffer(va_dpy,coded_buf); |
| 114 |
| 115 printf("\r "); /* return back to startpoint */ |
| 116 switch (current_frame % 4) { |
| 117 case 0: |
| 118 printf("|"); |
| 119 break; |
| 120 case 1: |
| 121 printf("/"); |
| 122 break; |
| 123 case 2: |
| 124 printf("-"); |
| 125 break; |
| 126 case 3: |
| 127 printf("\\"); |
| 128 break; |
| 129 } |
| 130 printf("%08d", current_frame); |
| 131 if (current_frame % intra_count == 0) |
| 132 printf("(I)"); |
| 133 else |
| 134 printf("(P)"); |
| 135 |
| 136 printf("(%06d bytes coded)",coded_size); |
| 137 if (frame_skipped) |
| 138 printf("(SKipped)"); |
| 139 printf(" "); |
| 140 |
| 141 return 0; |
| 142 } |
| 143 |
| 144 |
| 145 enum { |
| 146 SH_LEVEL_1=10, |
| 147 SH_LEVEL_1B=11, |
| 148 SH_LEVEL_2=20, |
| 149 SH_LEVEL_3=30, |
| 150 SH_LEVEL_31=31, |
| 151 SH_LEVEL_32=32, |
| 152 SH_LEVEL_4=40, |
| 153 SH_LEVEL_5=50 |
| 154 }; |
| 155 |
| 156 static int do_h264_encoding(void) |
| 157 { |
| 158 VAEncPictureParameterBufferH264 pic_h264; |
| 159 VAEncSliceParameterBuffer slice_h264; |
| 160 VAStatus va_status; |
| 161 VABufferID seq_param_buf, pic_param_buf, slice_param_buf; |
| 162 int codedbuf_size; |
| 163 VASurfaceStatus surface_status; |
| 164 int src_surface, dst_surface, ref_surface; |
| 165 int codedbuf_idx = 0; |
| 166 int frame_skipped = 0; |
| 167 int i; |
| 168 |
| 169 |
| 170 va_status = vaCreateSurfaces(va_dpy,frame_width, frame_height, |
| 171 VA_RT_FORMAT_YUV420, SURFACE_NUM, &surface_id[0]); |
| 172 CHECK_VASTATUS(va_status, "vaCreateSurfaces"); |
| 173 |
| 174 /* upload RAW YUV data into all surfaces */ |
| 175 upload_source_YUV_once_for_all(); |
| 176 |
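| /* coded buffer size: about 400 bytes per 16x16 macroblock -- a generous rough estimate, not a spec-mandated value */ |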
| 177 codedbuf_size = (frame_width * frame_height * 400) / (16*16); |
| 178 |
| 179 for (i = 0; i < CODEDBUF_NUM; i++) { |
| 180 /* create the coded buffers once for all frames: |
| 181 * the other VA buffers are not used again after vaRenderPicture, |
| 182 * so the app can simply vaCreateBuffer them for every frame, |
| 183 * but the coded buffer must be mapped and read after vaRenderPicture/vaEndPicture, |
| 184 * so VA does not keep it; the app creates it once and reuses it |
| 185 */ |
| 186 va_status = vaCreateBuffer(va_dpy,context_id,VAEncCodedBufferType, |
| 187 codedbuf_size, 1, NULL, &coded_buf[i]); |
| 188 CHECK_VASTATUS(va_status,"vaCreateBuffer"); |
| 189 } |
| 190 |
| 191 src_surface = 0; |
| 192 /* the last two frames are reference/reconstructed frame */ |
| 193 dst_surface = SURFACE_NUM - 1; |
| 194 ref_surface = SURFACE_NUM - 2; |
| 195 |
| 196 for (i = 0; i < frame_count; i++) { |
| 197 va_status = vaBeginPicture(va_dpy, context_id, surface_id[src_surface]); |
| 198 CHECK_VASTATUS(va_status,"vaBeginPicture"); |
| 199 |
| 200 if (i == 0) { |
| 201 VAEncSequenceParameterBufferH264 seq_h264; |
| 202 VABufferID seq_param_buf; |
| 203 |
| 204 seq_h264.level_idc = SH_LEVEL_3; |
| 205 seq_h264.picture_width_in_mbs = frame_width / 16; |
| 206 seq_h264.picture_height_in_mbs = frame_height / 16; |
| 207 seq_h264.bits_per_second = frame_bitrate; |
| 208 seq_h264.frame_rate = frame_rate; |
| 209 seq_h264.initial_qp = initial_qp; |
| 210 seq_h264.min_qp = minimal_qp; |
| 211 seq_h264.basic_unit_size = 0; |
| 212 seq_h264.intra_period = intra_count; |
| 213 |
| 214 va_status = vaCreateBuffer(va_dpy, context_id, |
| 215 VAEncSequenceParameterBufferType, |
| 216 sizeof(seq_h264),1,&seq_h264,&seq_param_buf); |
| 217 CHECK_VASTATUS(va_status,"vaCreateBuffer"); |
| 218 |
| 219 va_status = vaRenderPicture(va_dpy,context_id, &seq_param_buf, 1); |
| 220 CHECK_VASTATUS(va_status,"vaRenderPicture"); |
| 221 } |
| 222 |
| 223 |
| 224 pic_h264.reference_picture = surface_id[ref_surface]; |
| 225 pic_h264.reconstructed_picture= surface_id[dst_surface]; |
| 226 pic_h264.coded_buf = coded_buf[codedbuf_idx]; |
| 227 pic_h264.picture_width = frame_width; |
| 228 pic_h264.picture_height = frame_height; |
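| /* flag the final frame of the sequence to the encoder */ |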
| 229 pic_h264.last_picture = (i == (frame_count-1)); |
| 230 |
| 231 va_status = vaCreateBuffer(va_dpy, context_id,VAEncPictureParameterBufferType, |
| 232 sizeof(pic_h264),1,&pic_h264,&pic_param_buf); |
| 233 CHECK_VASTATUS(va_status,"vaCreateBuffer"); |
| 234 |
| 235 va_status = vaRenderPicture(va_dpy,context_id, &pic_param_buf, 1); |
| 236 CHECK_VASTATUS(va_status,"vaRenderPicture"); |
| 237 |
| 238 /* one frame, one slice */ |
| 239 slice_h264.start_row_number = 0; |
| 240 slice_h264.slice_height = frame_height/16; /* Measured by MB */ |
| 241 slice_h264.slice_flags.bits.is_intra = ((i % intra_count) == 0); |
| 242 slice_h264.slice_flags.bits.disable_deblocking_filter_idc = 0; |
| 243 va_status = vaCreateBuffer(va_dpy,context_id,VAEncSliceParameterBufferType, |
| 244 sizeof(slice_h264),1,&slice_h264,&slice_param_buf); |
| 245 CHECK_VASTATUS(va_status,"vaCreateBuffer"); |
| 246 |
| 247 va_status = vaRenderPicture(va_dpy,context_id, &slice_param_buf, 1); |
| 248 CHECK_VASTATUS(va_status,"vaRenderPicture"); |
| 249 |
| 250 va_status = vaEndPicture(va_dpy,context_id); |
| 251 CHECK_VASTATUS(va_status,"vaEndPicture"); |
| 252 |
| 253 va_status = vaSyncSurface(va_dpy, surface_id[src_surface]); |
| 254 CHECK_VASTATUS(va_status,"vaSyncSurface"); |
| 255 |
| 256 surface_status = (VASurfaceStatus) 0; |
| 257 va_status = vaQuerySurfaceStatus(va_dpy, surface_id[src_surface],&surface_status); |
| 258 frame_skipped = (surface_status & VASurfaceSkipped); |
| 259 |
| 260 save_coded_buf(coded_buf[codedbuf_idx], i, frame_skipped); |
| 261 |
| 262 /* should display the reconstructed frame, but just display the source frame here */ |
| 263 if (frame_display) { |
| 264 int exit_encode = 0; |
| 265 |
| 266 display_surface(src_surface, &exit_encode); |
| 267 if (exit_encode) |
| 268 frame_count = i; |
| 269 } |
| 270 |
| 271 /* use next surface */ |
| 272 src_surface++; |
| 273 if (src_surface == (SURFACE_NUM - 2)) |
| 274 src_surface = 0; |
| 275 |
| 276 /* use next codedbuf */ |
| 277 codedbuf_idx++; |
| 278 if (codedbuf_idx == (CODEDBUF_NUM - 1)) |
| 279 codedbuf_idx = 0; |
| 280 |
| 281 /* if this frame was skipped, the next frame still uses the same reference frame */ |
| 282 if (frame_skipped == 0) { |
| 283 /* swap ref/dst */ |
| 284 int tmp = dst_surface; |
| 285 dst_surface = ref_surface; |
| 286 ref_surface = tmp; |
| 287 } |
| 288 } |
| 289 |
| 290 return 0; |
| 291 } |
| 292 |
| 293 int main(int argc,char **argv) |
| 294 { |
| 295 VAEntrypoint entrypoints[5]; |
| 296 int num_entrypoints,slice_entrypoint; |
| 297 VAConfigAttrib attrib[2]; |
| 298 VAConfigID config_id; |
| 299 int major_ver, minor_ver; |
| 300 VAStatus va_status; |
| 301 int c; |
| 302 |
| 303 strcpy(coded_file, "/tmp/demo.264"); |
| 304 while ((c = getopt(argc,argv,"w:h:n:p:f:r:q:s:o:d?")) != EOF) { |
| 305 switch (c) { |
| 306 case 'w': |
| 307 frame_width = atoi(optarg); |
| 308 break; |
| 309 case 'h': |
| 310 frame_height = atoi(optarg); |
| 311 break; |
| 312 case 'n': |
| 313 frame_count = atoi(optarg); |
| 314 break; |
| 315 case 'p': |
| 316 intra_count = atoi(optarg); |
| 317 break; |
| 318 case 'f': |
| 319 frame_rate = atoi(optarg); |
| 320 break; |
| 321 case 'r': |
| 322 frame_bitrate = atoi(optarg); |
| 323 break; |
| 324 case 'q': |
| 325 initial_qp = atoi(optarg); |
| 326 break; |
| 327 case 's': |
| 328 minimal_qp = atoi(optarg); |
| 329 break; |
| 330 case 'd': |
| 331 frame_display = 1; |
| 332 break; |
| 333 case 'o': |
| 334 strcpy(coded_file, optarg); |
| 335 break; |
| 336 case ':': |
| 337 case '?': |
| 338 printf("./h264encode <options>\n"); |
| 339 printf(" -w -h: resolution\n"); |
| 340 printf(" -n frame number\n"); |
| 341 printf(" -d display the source frame\n"); |
| 342 printf(" -p P frame count between two I frames\n"); |
| 343 printf(" -f frame rate\n"); |
| 344 printf(" -r bit rate\n"); |
| 345 printf(" -q initial QP\n"); |
| 346 printf(" -s maximum QP\n"); |
| 347 printf(" -o coded file\n"); |
| 348 exit(0); |
| 349 } |
| 350 } |
| 351 |
| 352 #ifdef ANDROID |
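| /* the Android build has no X11; fake a native display handle with an arbitrary value */ |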
| 353 x11_display = (Display*)malloc(sizeof(Display)); |
| 354 *(x11_display) = 0x18c34078; |
| 355 #else |
| 356 x11_display = XOpenDisplay(":0.0"); |
| 357 #endif |
| 358 assert(x11_display); |
| 359 |
| 360 va_dpy = vaGetDisplay(x11_display); |
| 361 va_status = vaInitialize(va_dpy, &major_ver, &minor_ver); |
| 362 CHECK_VASTATUS(va_status, "vaInitialize"); |
| 363 |
| 364 vaQueryConfigEntrypoints(va_dpy, VAProfileH264Baseline, entrypoints, |
| 365 &num_entrypoints); |
| 366 for (slice_entrypoint = 0; slice_entrypoint < num_entrypoints; slice_entrypoint++) { |
| 367 if (entrypoints[slice_entrypoint] == VAEntrypointEncSlice) |
| 368 break; |
| 369 } |
| 370 if (slice_entrypoint == num_entrypoints) { |
| 371 /* could not find the encode slice entry point */ |
| 372 assert(0); |
| 373 } |
| 374 |
| 375 /* find out the format for the render target, and rate control mode */ |
| 376 attrib[0].type = VAConfigAttribRTFormat; |
| 377 attrib[1].type = VAConfigAttribRateControl; |
| 378 vaGetConfigAttributes(va_dpy, VAProfileH264Baseline, VAEntrypointEncSlice, |
| 379 &attrib[0], 2); |
| 380 if ((attrib[0].value & VA_RT_FORMAT_YUV420) == 0) { |
| 381 /* could not find the desired YUV420 RT format */ |
| 382 assert(0); |
| 383 } |
| 384 if ((attrib[1].value & VA_RC_VBR) == 0) { |
| 385 /* can't find a matching RC mode */ |
| 386 printf("VBR mode not found, exit\n"); |
| 387 assert(0); |
| 388 } |
| 389 attrib[0].value = VA_RT_FORMAT_YUV420; /* set to desired RT format */ |
| 390 attrib[1].value = VA_RC_VBR; /* set to desired RC mode */ |
| 391 |
| 392 va_status = vaCreateConfig(va_dpy, VAProfileH264Baseline, VAEntrypointEncSlice, |
| 393 &attrib[0], 2,&config_id); |
| 394 CHECK_VASTATUS(va_status, "vaCreateConfig"); |
| 395 |
| 396 va_status = vaCreateSurfaces(va_dpy,frame_width, frame_height, |
| 397 VA_RT_FORMAT_YUV420, SURFACE_NUM, &surface_id[0]); |
| 398 CHECK_VASTATUS(va_status, "vaCreateSurfaces"); |
| 399 |
| 400 /* Create a context for this encode pipe */ |
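| /* picture height is rounded up to a multiple of 16 for macroblock alignment */ |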
| 401 va_status = vaCreateContext(va_dpy, config_id, |
| 402 frame_width, ((frame_height+15)/16)*16, |
| 403 VA_PROGRESSIVE,&surface_id[0],SURFACE_NUM,&context_id); |
| 404 CHECK_VASTATUS(va_status, "vaCreateContext"); |
| 405 |
| 406 /* store coded data into a file */ |
| 407 coded_fd = open(coded_file, O_CREAT|O_TRUNC|O_RDWR, 0644); |
| 408 if (coded_fd == -1) { |
| 409 printf("Open file %s failed, exit\n", coded_file); |
| 410 exit(1); |
| 411 } |
| 412 |
| 413 printf("Coded %d frames, %dx%d, save the coded file into %s\n", |
| 414 frame_count, frame_width, frame_height, coded_file); |
| 415 do_h264_encoding(); |
| 416 |
| 417 printf("\n\n"); |
| 418 |
| 419 vaDestroySurfaces(va_dpy,&surface_id[0],SURFACE_NUM); |
| 420 vaDestroyContext(va_dpy,context_id); |
| 421 vaDestroyConfig(va_dpy,config_id); |
| 422 |
| 423 vaTerminate(va_dpy); |
| 424 |
| 425 #ifdef ANDROID |
| 426 free(x11_display); |
| 427 #else |
| 428 XCloseDisplay(x11_display); |
| 429 #endif |
| 430 |
| 431 return 0; |
| 432 } |