Converting a YUV 4:2:0 file to RGB is not giving the expected output



The logic I use is to read the YUV file into a buffer and set up 3 pointers to the Y, U and V components. The image size is 1920*1080.

I am

  • taking 4 Y pixels together with the corresponding 1 U and 1 V pixel
  • extracting the pixel values into integers
  • converting Y, U, V to R, G, B and storing the components in an RGB buffer

But the video output is not correct. The output looks more or less black and white.

#include "stdafx.h"
#include <string>
#include <stdio.h>
unsigned char* g_pcRGBbuffer;
unsigned char* g_pcYUVBuffer;
int _tmain(int argc, _TCHAR* argv[])
{
int l_nSize = 1920 * 1080 * 1.5;
g_pcYUVBuffer = new unsigned char[l_nSize];
g_pcRGBbuffer = new unsigned char[1920 * 1080 * 3];
FILE* fp_source;
FILE* fp_rgb = NULL;
int l_nY, l_nU, l_nV;
double l_dR, l_dG, l_dB, l_ni;
fp_source = fopen("D:\Sample_1920x1080.yuv", "rb");
// converting yuv file to rgb file
if (fp_source)
{
fp_rgb = fopen("D:\Sample_1920x1080.rgb", "wb+");
while (!feof(fp_source))
{
fread(g_pcYUVBuffer, 1, l_nSize, fp_source);
unsigned char* l_pcY = g_pcYUVBuffer;
unsigned char* l_pcU = l_pcY + 1920 * 1080;
unsigned char* l_pcV = l_pcU + ((1920 * 1080) / 4);
unsigned char* l_pcRGBbuffer = g_pcRGBbuffer;
for (l_ni = 0; l_ni < (1920 * 1080) / 4; l_ni++)
{
l_nY = l_pcY[0];
l_nU = l_pcU[0];
l_nV = l_pcV[0];
l_dR = l_nY + 1.402 * (l_nV - 128);
l_dG = l_nY - 0.34414 * (l_nU - 128) - 0.71414 * (l_nV - 128);
l_dB = l_nY + 1.772 * (l_nU - 128);
// This prevents colour distortions in  rgb image
if (l_dR < 0)
l_dR = 0;
else if (l_dR > 255)
l_dR = 255;
if (l_dG < 0)
l_dG = 0;
else if (l_dG > 255)
l_dG = 255;
if (l_dB < 0)
l_dB = 0;
else if (l_dB > 255)
l_dB = 255;
// 1st pixel of RGB
l_pcRGBbuffer[0] = l_dR;
l_pcRGBbuffer[1] = l_dG;
l_pcRGBbuffer[2] = l_dB;
l_nY = l_pcY[1];
l_nU = l_pcU[0];
l_nV = l_pcV[0];
l_dR = l_nY + 1.402 * (l_nV - 128);
l_dG = l_nY - 0.34414 * (l_nU - 128) - 0.71414 * (l_nV - 128);
l_dB = l_nY + 1.772 * (l_nU - 128);
if (l_dR < 0)
l_dR = 0;
else if (l_dR > 255)
l_dR = 255;
if (l_dG < 0)
l_dG = 0;
else if (l_dG > 255)
l_dG = 255;
if (l_dB < 0)
l_dB = 0;
else if (l_dB > 255)
l_dB = 255;
// 2nd pixel of RGB
l_pcRGBbuffer[3] = l_dR;
l_pcRGBbuffer[4] = l_dG;
l_pcRGBbuffer[5] = l_dB;
l_nY = l_pcY[2];
l_nU = l_pcU[0];
l_nV = l_pcV[0];
l_dR = l_nY + 1.402 * (l_nV - 128);
l_dG = l_nY - 0.34414 * (l_nU - 128) - 0.71414 * (l_nV - 128);
l_dB = l_nY + 1.772 * (l_nU - 128);
if (l_dR < 0)
l_dR = 0;
else if (l_dR > 255)
l_dR = 255;
if (l_dG < 0)
l_dG = 0;
else if (l_dG > 255)
l_dG = 255;
if (l_dB < 0)
l_dB = 0;
else if (l_dB > 255)
l_dB = 255;
// 3rd pixel of RGB
l_pcRGBbuffer[6] = l_dR;
l_pcRGBbuffer[7] = l_dG;
l_pcRGBbuffer[8] = l_dB;
l_nY = l_pcY[3];
l_nU = l_pcU[0];
l_nV = l_pcV[0];
// l_dR = 1.164*(l_nY-16 ) + 1.596*(l_nV-128 );
// l_dG = 1.164*(l_nY-16 ) - 0.813*(l_nV-128 ) - 0.391*(l_nU-128);
// l_dB = 1.164*(l_nY-16 ) + 2.018*(l_nU-128 );
l_dR = l_nY + 1.402 * (l_nV - 128);
l_dG = l_nY - 0.34414 * (l_nU - 128) - 0.71414 * (l_nV - 128);
l_dB = l_nY + 1.772 * (l_nU - 128);
if (l_dR < 0)
l_dR = 0;
else if (l_dR > 255)
l_dR = 255;
if (l_dG < 0)
l_dG = 0;
else if (l_dG > 255)
l_dG = 255;
if (l_dB < 0)
l_dB = 0;
else if (l_dB > 255)
l_dB = 255;
// 4th pixel of RGB
l_pcRGBbuffer[9] = l_dR;
l_pcRGBbuffer[10] = l_dG;
l_pcRGBbuffer[11] = l_dB;
l_pcY += 4;
l_pcU += 1;
l_pcV += 1;
l_pcRGBbuffer += 12;
}
fwrite(g_pcRGBbuffer, 1, 1920 * 1080 * 3, fp_rgb);
}
printf("Video converted to rgb file n ");
}
else
{
printf("failn");
}
fclose(fp_rgb);
fclose(fp_source);
return 0;
}

Actually, planar YUV 4:2:0 stores all the Y samples first, then all the U samples, and then all the V samples. To pick the correct samples for a given pixel, use the following formula:

// Refer to Wikipedia for further details
size.total = size.width * size.height;
y = yuv[position.y * size.width + position.x];
u = yuv[(position.y / 2) * (size.width / 2) + (position.x / 2) + size.total];
v = yuv[(position.y / 2) * (size.width / 2) + (position.x / 2) + size.total + (size.total / 4)];
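To make the indexing concrete, here is a minimal sketch that applies this formula to a single pixel of a planar 4:2:0 (I420) buffer. The helper names ClampToByte and I420PixelToRgb are only illustrative (not part of the original code); the conversion uses the same BT.601 coefficients as the full program that follows.

#include <algorithm>

// Illustrative helper: clamp a value into 0..255 before storing it in a byte.
static unsigned char ClampToByte(int v)
{
    return (unsigned char)std::min(255, std::max(0, v));
}

// Illustrative helper: convert the pixel at (x, y) of an I420 buffer
// (size width * height * 3 / 2) to RGB, using the indexing shown above.
static void I420PixelToRgb(const unsigned char* yuv, int width, int height,
                           int x, int y, unsigned char rgb[3])
{
    int total = width * height;                                       // size of the Y plane
    int Y = yuv[y * width + x];                                       // full-resolution luma
    int U = yuv[total + (y / 2) * (width / 2) + (x / 2)];             // quarter-resolution chroma
    int V = yuv[total + total / 4 + (y / 2) * (width / 2) + (x / 2)];

    rgb[0] = ClampToByte((int)(1.164 * (Y - 16) + 1.596 * (V - 128)));
    rgb[1] = ClampToByte((int)(1.164 * (Y - 16) - 0.813 * (V - 128) - 0.391 * (U - 128)));
    rgb[2] = ClampToByte((int)(1.164 * (Y - 16) + 2.018 * (U - 128)));
}

The full program below applies the same indexing to every pixel of each frame.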

// YUV2RGBTestApp2.cpp : Defines the entry point for the console application.

#include "stdafx.h"
#include <string>
#include <stdio.h>
unsigned char *g_pcRGBbuffer;
unsigned char *g_pcYUVBuffer;
int  _tmain(int argc, _TCHAR* argv[])
{
int l_nSize = 1920 * 1080 * 1.5;
g_pcYUVBuffer = new unsigned char[l_nSize];
g_pcRGBbuffer = new unsigned char[1920 * 1080 * 3];
FILE *fp_source;
FILE *fp_rgb = NULL;
int l_ny, l_nu, l_nv, l_ni, RGBval;
int l_dr, l_dg, l_db;
fp_source = fopen("D:\Sample_1920x1080.yuv", "rb");
int l_nj;

//converting yuv file to rgb file
if (fp_source) {
fp_rgb = fopen("D:\Sample_1920x1080.rgb", "wb");
while (!feof(fp_source))
{
fread(g_pcYUVBuffer, 1, l_nSize, fp_source);
unsigned char *l_pcRGBbuffer = g_pcRGBbuffer;
for (int j = 0; j < 1080; j++)
{
for (int i = 0; i<1920; i++)
{
/*
Position  for y,u,v components for yuv planar 4:2:0  
Refer wikipedia for further reference  

*/
int Y = g_pcYUVBuffer[j * 1920 + i];
int U = g_pcYUVBuffer[((j / 2) * 960) + (i / 2) + (1920 * 1080)];
int V = g_pcYUVBuffer[((j / 2) * 960) + (i / 2) + (1920 * 1080) + ((1920 * 1080) / 4)];
int R = 1.164*(Y - 16) + 1.596*(V - 128);
int G = 1.164*(Y - 16) - 0.813*(V - 128) - 0.391*(U - 128);
int B = 1.164*(Y - 16) + 2.018*(U - 128);

if (R>255)R = 255;
if (R<0)R = 0;
if (G>255)G = 255;
if (G<0)G = 0;
if (B>255)B = 255;
if (B<0)B = 0;

l_pcRGBbuffer[0] = R;
l_pcRGBbuffer[1] = G;
l_pcRGBbuffer[2] = B;
l_pcRGBbuffer += 3;
}
}

fwrite(g_pcRGBbuffer, 1, 1920 * 1080 * 3, fp_rgb);
}
printf("Video converted to rgb file n ");
}
else {
printf("failn");
}
fclose(fp_rgb);
fclose(fp_source);
return 0;
}
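The key change from the first version is the chroma indexing: every pixel (x, y) now reads its U and V samples at ((y / 2) * (width / 2) + (x / 2)) inside the quarter-size chroma planes, so each 2x2 block of Y samples shares one U and one V value, instead of four horizontally consecutive Y samples sharing one. If ffmpeg happens to be installed, the raw output can be sanity-checked with something like ffplay -f rawvideo -pixel_format rgb24 -video_size 1920x1080 D:\Sample_1920x1080.rgb (adjust the path as needed).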
