
Porting HM to Android to Build a Player

Fla posted @ 2013-04-09 20:48 with tags Android HEVC HM H.265 decoder , 2205 reads

My CSDN blog covers the related material in detail:

http://blog.csdn.net/luofl1992/article/details/8736149

How to write the Android.mk in detail was covered in an earlier article, so it is not repeated here.

This post presents the related code.

Porting to Android cannot mean writing the decoder entirely in Java; Java alone is simply not efficient enough for this.

Besides, my experience is limited, and rewriting the HM decoder code within a month is not realistic.

So another approach is needed. Fortunately, Google provides the Android NDK, which can compile C/C++ code into shared libraries usable on Android.

I will not go into more detail here. Below is the key interface code; it is part of my graduation project work, so please use it with care:

/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class com_NcHevc_NcHevcDecoder */

#ifndef _Included_com_NcHevc_NcHevcDecoder
#define _Included_com_NcHevc_NcHevcDecoder

#include "TLibCommon/CommonDef.h"

typedef unsigned char byte;	// 8-bit unsigned type used by the YUV-to-RGB conversion
// function implementing the YUV-to-RGB conversion
extern void NcDecoderYUV2RGB(Pel *y, Pel *u, Pel *v, Int stride, Int cstride, UInt *lpDiBits, int width, int height);
	
#ifdef __cplusplus
extern "C" {
#endif
/*
 * Class:     com_NcHevc_NcHevcDecoder
 * Method:    Open
 * Signature: (Ljava/lang/String;)Z
 */
JNIEXPORT jboolean JNICALL Java_com_NcHevc_NcHevcDecoder_Open
  (JNIEnv *, jobject, jstring);

/*
 * Class:     com_NcHevc_NcHevcDecoder
 * Method:    GetPixelsBuffer
 * Signature: ([I)V
 */
JNIEXPORT void JNICALL Java_com_NcHevc_NcHevcDecoder_GetPixelsBuffer
  (JNIEnv *, jobject, jintArray);

/*
 * Class:     com_NcHevc_NcHevcDecoder
 * Method:    DecodeFrame
 * Signature: ()Z
 */
JNIEXPORT jboolean JNICALL Java_com_NcHevc_NcHevcDecoder_DecodeFrame
  (JNIEnv *, jobject);

#ifdef __cplusplus
}
#endif
#endif
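
For reference, the Java class implied by these JNI names would look roughly like the sketch below. The package, class and method names follow from the generated function names above; the library name passed to System.loadLibrary() depends on LOCAL_MODULE in your Android.mk, so "NcHevc" here is only an assumption.

package com.NcHevc;

public class NcHevcDecoder {
	static {
		// assumed module name, i.e. a libNcHevc.so built by the NDK
		System.loadLibrary("NcHevc");
	}

	// (Ljava/lang/String;)Z -- open the file given by path; false on failure
	public native boolean Open(String path);

	// ([I)V -- hand the decoder an int[] that it will fill with ARGB pixel data
	public native void GetPixelsBuffer(int[] pixels);

	// ()Z -- decode and convert the next displayable frame; false at end of stream
	public native boolean DecodeFrame();
}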

Next comes the implementation in the source file.

#include "NcHevcDecoder.h"

#include <list>
#include <vector>
#include <stdio.h>
#include <fcntl.h>
#include <assert.h>
#include <locale.h>
#include <string>
using namespace std;

#include "TAppDecTop.h"
#include "TLibDecoder/AnnexBread.h"
#include "TLibDecoder/NALread.h"


Bool g_md5_mismatch = false;  // top level flag to signal when there is a decode problem

class CNcMHevcDecoder : public TAppDecTop
{
protected:
	ifstream bitstreamFile;
	InputByteStream bytestream;
	Int                 poc;
	TComList<TComPic *> *pcListPic;
	Int width, height;
	UInt *lpDiBits;
	Bool recon_opened;		// reconstruction file not yet opened. (must be performed after SPS is seen)
	int last_tId;			// temporal id of the last frame returned by DecodeFrames(), initialized to -2
//	static UINT ThreadDecode( LPVOID lpVoid );
	int DecodeFrames( void );	// decode one frame; returns immediately while last_tId != -2
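	// last_tId is the handshake between DecodeFrames() and GetNextFrame(): DecodeFrames()
	// stores the temporal id of the freshly decoded picture here and keeps returning it
	// unchanged until GetNextFrame() resets it to -2, which allows the next decode.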

public:
	CNcMHevcDecoder()
		: bytestream(bitstreamFile)
		, poc(0), recon_opened(false), last_tId(-2), width(416), height(240)
	{
//		setlocale(LC_ALL, ".936");

		pcListPic = NULL;
		lpDiBits = NULL;
		LOGI("0. trying create file: aaaaa1.txt ");
		FILE *fp = fopen("/mnt/sdcard/aaaaa1.txt","w+");	// open for writing so the fprintf() test below can succeed
		if ( NULL == fp )
		{
			LOGI("    0.1. trying create file failed: aaaaa1.txt ");
			return ;
		}
		else
		{
			LOGI("    0.2. create file success: aaaaa1.txt ");
			fprintf( fp, "123456789\n" );
			fclose( fp );
			LOGI("    0.3. write file success: aaaaa1.txt ");		
		}
		LOGW("HM software: Decoder version [%s] %s", NV_VERSION, NVM_ONOS);
		LOGW(NVM_BITS);
		LOGW(NVM_COMPILEDBY);
	}

	~CNcMHevcDecoder()
	{
		// delete buffers
		m_cTDecTop.deletePicBuffer();

		// destroy internal classes
		xDestroyDecLib();
		LOGI("    0.4. CNcMHevcDecoder destroyed! ");
		TAppDecTop::destroy();
	}
	// fetch the next frame to be displayed
	bool GetNextFrame( void );
	bool OpenFile( const char * lpFilePath);
	bool GetWidthHeight(int &w, int &h)
	{
		DecodeFrames();
		if ( pcListPic != NULL && pcListPic->size() )
		{
			TComPic* pcPic = *(pcListPic->begin());
			width = pcPic->getPicYuvRec()->getWidth();
			height = pcPic->getPicYuvRec()->getHeight();
			LOGI("Success: Frame width = %d, height = %d", width, height);
			w = width;
			h = height;
			return true;
		}
		else
		{
			LOGI("Error: Frame width = %d, height = %d", width, height);
			return false;
		}
	}
	void SetPixels(int *pixels)
	{
		lpDiBits = (UInt *)pixels;
	}

};

// a single global decoder instance used by all JNI entry points
CNcMHevcDecoder decoder;

/*
 * Class:     com_NcHevc_NcHevcDecoder
 * Method:    Open
 * Signature: (Ljava/lang/String;)Z
 */
JNIEXPORT jboolean JNICALL Java_com_NcHevc_NcHevcDecoder_Open
  (JNIEnv *env, jobject obj, jstring str)
{
/*
	JNIString str(env, string);
	JNIString s2(str);
	s2 += ".txt";
	FILE *fp = fopen( s2, "rw" );
	if ( NULL != fp )
	{
		fprintf( fp, "12345" );
		fclose(fp);
	}
	return decoder.OpenFile(str.GetChars());
*/
/*	LOGI("0. JNIEXPORT jboolean JNICALL Java_com_NcHevc_NcHevcDecoder_Open");
	string s("/mnt/sdcard/aaaa2.txt");
	LOGI("1. path: = %s", s.c_str());
	FILE *fp = fopen( s.c_str(), "rw" );
	if ( NULL != fp )
	{
		LOGI("2. file open succ %s", s.c_str());
		fprintf( fp, "12345" );
		fclose(fp);
	}
	else
	{
		LOGW("2. file open failed. %s", s.c_str());
	}
*/
	return decoder.OpenFile(env->GetStringUTFChars(str, NULL));
}

/*
 * Class:     com_NcHevc_NcHevcDecoder
 * Method:    GetPixelsBuffer
 * Signature: ([I)V
 */
JNIEXPORT void JNICALL Java_com_NcHevc_NcHevcDecoder_GetPixelsBuffer
  (JNIEnv *env, jobject obj, jintArray pixels)
{
	int width = 416, height = 240;	// defaults, overwritten by GetWidthHeight() on success
	decoder.GetWidthHeight( width, height );
//	pixels = env->NewIntArray(width*height);
	int *arr = env->GetIntArrayElements(pixels, NULL);
	memset( arr, 0xffffffff, sizeof(int) * env->GetArrayLength(pixels));	// memset uses only the low byte (0xFF), so every int becomes 0xFFFFFFFF: opaque white (changed from 0, 2013-3-24)
	LOGI( "pixels buffer = 0x%08x , size = %d", arr, env->GetArrayLength(pixels));
	arr[0] = width;
	arr[1] = height;
	decoder.SetPixels( arr );
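	// Note: arr points into the Java int[] obtained above and the decoder keeps this pointer,
	// writing decoded pixel data into it on every later DecodeFrame() call. No
	// ReleaseIntArrayElements() is issued, so this relies on the VM pinning the array rather
	// than handing back a copy. arr[0]/arr[1] carry the frame size back to Java and are
	// overwritten by pixel data once decoding starts.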
}

/*
 * Class:     com_NcHevc_NcHevcDecoder
 * Method:    DecodeFrame
 * Signature: ()Z
 */
JNIEXPORT jboolean JNICALL Java_com_NcHevc_NcHevcDecoder_DecodeFrame
  (JNIEnv *env, jobject obj)
{
	return decoder.GetNextFrame();
}

// implementations of the member functions

bool CNcMHevcDecoder::OpenFile( const char *lpFilePath )
{
	TAppDecTop::create();
//	LOGI("1. CNcMHevcDecoder::OpenFile : %s", lpFilePath);
	if( !TAppDecTop::parseCfg( lpFilePath ) )
	{
		LOGI("2. CNcMHevcDecoder::OpenFile :parse Configure error : %s", lpFilePath);
		TAppDecTop::destroy();
		return false;
	}
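	// At this point parseCfg() has filled in m_pchBitstreamFile and the remaining decoder
	// options (the stock HM parseCfg() takes argc/argv; this build takes a single path), so
	// the bitstream is opened from that member below.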


	bitstreamFile.open( (char const *)m_pchBitstreamFile, ifstream::in | ifstream::binary );
	if( !bitstreamFile )
	{
		LOGI("3. CNcMHevcDecoder::OpenFile :reading file error : %s", lpFilePath);
		return false;
	}

	//LOGI("4. CNcMHevcDecoder::OpenFile : success ");
	// create & initialize internal classes
	xCreateDecLib();
	//LOGI("5. CNcMHevcDecoder::OpenFile : xCreateDecLib succ ");
	xInitDecLib();
	//LOGI("6. CNcMHevcDecoder::OpenFile : xInitDecLib succ ");
	m_iPOCLastDisplay += m_iSkipFrame;      // set the last displayed POC correctly for skip forward.

	//LOGI("7. CNcMHevcDecoder::OpenFile : Finished");
	return true;
}

int CNcMHevcDecoder::DecodeFrames( void )
{	
	// main decode loop
//	LOGI("----0. CNcMHevcDecoder::DecodeFrames()");
	if ( last_tId != -2 )
		return last_tId;	// last decoded frame not been handled by GetNextFrame().
	while ( !!bitstreamFile )
	{
		/* location serves to work around a design fault in the decoder, whereby
		 * the process of reading a new slice that is the first slice of a new frame
		 * requires the TDecTop::decode() method to be called again with the same
		 * nal unit. */
	//	LOGI("    1. CNcMHevcDecoder::DecodeFrames()");
		streampos location = bitstreamFile.tellg();
		AnnexBStats stats = AnnexBStats();
		Bool bPreviousPictureDecoded = false;

		vector<uint8_t> nalUnit;
		InputNALUnit nalu;
		byteStreamNALUnit( bytestream, nalUnit, stats );

		// call actual decoding function
		Bool bNewPicture = false;
	//	LOGI("----2. CNcMHevcDecoder::DecodeFrames()");
		if( nalUnit.empty() )
		{
			/* this can happen if the following occur:
			 *  - empty input file
			 *  - two back-to-back start_code_prefixes
			 *  - start_code_prefix immediately followed by EOF
			 */
//			LOGW( "----Warning: Attempt to decode an empty NAL unit\n" );
		}
		else
		{
		//	LOGI("----3. CNcMHevcDecoder::DecodeFrames()");
			read( nalu, nalUnit );
			if( ( m_iMaxTemporalLayer >= 0 && nalu.m_temporalId > m_iMaxTemporalLayer ) || !isNaluWithinTargetDecLayerIdSet( &nalu ) )
			{
			//	LOGI("----4. CNcMHevcDecoder::DecodeFrames()");
				if( bPreviousPictureDecoded )
				{
					bNewPicture = true;
					bPreviousPictureDecoded = false;
				}
				else
				{
					bNewPicture = false;
				}
			}
			else
			{
			//	LOGI("----5. CNcMHevcDecoder::DecodeFrames()");
				bNewPicture = m_cTDecTop.decode( nalu, m_iSkipFrame, m_iPOCLastDisplay );
				if( bNewPicture )
				{
					bitstreamFile.clear();
					/* location points to the current nalunit payload[1] due to the
					 * need for the annexB parser to read three extra bytes.
					 * [1] except for the first NAL unit in the file
					 *     (but bNewPicture doesn't happen then) */
					bitstreamFile.seekg( location - streamoff( 3 ) );
					bytestream.reset();
				}
				bPreviousPictureDecoded = true;
			}
		}
		if( bNewPicture || !bitstreamFile )
		{
			// run the in-loop filters
//			LOGI("----6. CNcMHevcDecoder::DecodeFrames() run the in-loop filters");
			m_cTDecTop.executeLoopFilters( poc, pcListPic);
		}

		if( pcListPic )
		{
//			LOGI("----7. CNcMHevcDecoder::DecodeFrames() pcListPic Not Null");
			if( m_pchReconFile && !recon_opened )
			{
				if( !m_outputBitDepthY )
				{
					m_outputBitDepthY = g_bitDepthY;
				}
				if( !m_outputBitDepthC )
				{
					m_outputBitDepthC = g_bitDepthC;
				}

				m_cTVideoIOYuvReconFile.open( m_pchReconFile, true, m_outputBitDepthY, m_outputBitDepthC, g_bitDepthY, g_bitDepthC ); // write mode
				recon_opened = true;
			}
			// write reconstruction to file
			if( bNewPicture )
			{
			/*	if( nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR
					|| nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR_N_LP
					|| nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA_N_LP
					|| nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_BLANT
					|| nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA ){
					LOGI("----8. CNcMHevcDecoder::DecodeFrames() new picture");
					//	xFlushOutput( pcListPic );	// 写解码结果到文件
				}
			*/
				// changed here: instead of writing to a file, the new picture is handed out for on-screen display
			//	xWriteOutput( pcListPic, nalu.m_temporalId );
			//	LOGI("----9. CNcMHevcDecoder::DecodeFrames() new picture");
				return last_tId = nalu.m_temporalId;
			}
		}
	//	LOGI("    10. CNcMHevcDecoder::DecodeFrames() pcList NUll");
	}
//	LOGI("----11. CNcMHevcDecoder::DecodeFrames() decode error");
	return -1;
}

/*
* Convert YUV to RGB
*/

inline byte clip(const Pel x)
{
	return (x > 255) ? 255 : (x < 0 ? 0 : x);
}

// whether to use the fast conversion path
#if 1
//#define FAST_TABLE_SEARCH_YUV2RGB
#ifdef FAST_TABLE_SEARCH_YUV2RGB
static const Pel dFAC_B[256] = {
	-226, -224, -222, -220, -219, -217, -215, -213, -212, -210, -208, -206, -205, -203, -201, -199, 
	-197, -196, -194, -192, -190, -189, -187, -185, -183, -182, -180, -178, -176, -174, -173, -171, 
	-169, -167, -166, -164, -162, -160, -158, -157, -155, -153, -151, -150, -148, -146, -144, -143, 
	-141, -139, -137, -135, -134, -132, -130, -128, -127, -125, -123, -121, -119, -118, -116, -114, 
	-112, -111, -109, -107, -105, -104, -102, -100, -98, -96, -95, -93, -91, -89, -88, -86, 
	-84, -82, -81, -79, -77, -75, -73, -72, -70, -68, -66, -65, -63, -61, -59, -57, 
	-56, -54, -52, -50, -49, -47, -45, -43, -42, -40, -38, -36, -34, -33, -31, -29, 
	-27, -26, -24, -22, -20, -18, -17, -15, -13, -11, -10, -8, -6, -4, -3, -1, 
	0, 2, 4, 5, 7, 9, 11, 12, 14, 16, 18, 19, 21, 23, 25, 27, 
	28, 30, 32, 34, 35, 37, 39, 41, 43, 44, 46, 48, 50, 51, 53, 55, 
	57, 58, 60, 62, 64, 66, 67, 69, 71, 73, 74, 76, 78, 80, 82, 83, 
	85, 87, 89, 90, 92, 94, 96, 97, 99, 101, 103, 105, 106, 108, 110, 112, 
	113, 115, 117, 119, 120, 122, 124, 126, 128, 129, 131, 133, 135, 136, 138, 140, 
	142, 144, 145, 147, 149, 151, 152, 154, 156, 158, 159, 161, 163, 165, 167, 168, 
	170, 172, 174, 175, 177, 179, 181, 183, 184, 186, 188, 190, 191, 193, 195, 197, 
	198, 200, 202, 204, 206, 207, 209, 211, 213, 214, 216, 218, 220, 221, 223, 225
};
static const Pel dFAC_R[256] = {
	-178, -177, -176, -174, -173, -171, -170, -169, -167, -166, -164, -163, -162, -160, -159, -157, 
	-156, -155, -153, -152, -150, -149, -148, -146, -145, -143, -142, -141, -139, -138, -136, -135, 
	-134, -132, -131, -129, -128, -127, -125, -124, -122, -121, -120, -118, -117, -115, -114, -113, 
	-111, -110, -108, -107, -106, -104, -103, -101, -100, -99, -97, -96, -94, -93, -92, -90, 
	-89, -87, -86, -85, -83, -82, -80, -79, -78, -76, -75, -73, -72, -71, -69, -68, 
	-66, -65, -63, -62, -61, -59, -58, -56, -55, -54, -52, -51, -49, -48, -47, -45, 
	-44, -42, -41, -40, -38, -37, -35, -34, -33, -31, -30, -28, -27, -26, -24, -23, 
	-21, -20, -19, -17, -16, -14, -13, -12, -10, -9, -7, -6, -5, -3, -2, 0, 
	0, 1, 3, 4, 6, 7, 8, 10, 11, 13, 14, 15, 17, 18, 20, 21, 
	22, 24, 25, 27, 28, 29, 31, 32, 34, 35, 36, 38, 39, 41, 42, 43, 
	45, 46, 48, 49, 50, 52, 53, 55, 56, 57, 59, 60, 62, 63, 64, 66, 
	67, 69, 70, 72, 73, 74, 76, 77, 79, 80, 81, 83, 84, 86, 87, 88, 
	90, 91, 93, 94, 95, 97, 98, 100, 101, 102, 104, 105, 107, 108, 109, 111, 
	112, 114, 115, 116, 118, 119, 121, 122, 123, 125, 126, 128, 129, 130, 132, 133, 
	135, 136, 137, 139, 140, 142, 143, 144, 146, 147, 149, 150, 151, 153, 154, 156, 
	157, 158, 160, 161, 163, 164, 165, 167, 168, 170, 171, 172, 174, 175, 177, 178
};
static const Pel dFAC_GU[256] = {
	44, 44, 43, 43, 43, 42, 42, 42, 41, 41, 41, 40, 40, 40, 39, 39, 
	39, 38, 38, 38, 37, 37, 36, 36, 36, 35, 35, 35, 34, 34, 34, 33, 
	33, 33, 32, 32, 32, 31, 31, 31, 30, 30, 30, 29, 29, 29, 28, 28, 
	28, 27, 27, 26, 26, 26, 25, 25, 25, 24, 24, 24, 23, 23, 23, 22, 
	22, 22, 21, 21, 21, 20, 20, 20, 19, 19, 19, 18, 18, 18, 17, 17, 
	17, 16, 16, 15, 15, 15, 14, 14, 14, 13, 13, 13, 12, 12, 12, 11, 
	11, 11, 10, 10, 10, 9, 9, 9, 8, 8, 8, 7, 7, 7, 6, 6, 
	6, 5, 5, 4, 4, 4, 3, 3, 3, 2, 2, 2, 1, 1, 1, 0, 
	0, 0, 0, 0, 0, -1, -1, -1, -2, -2, -2, -3, -3, -3, -4, -4, 
	-5, -5, -5, -6, -6, -6, -7, -7, -7, -8, -8, -8, -9, -9, -9, -10, 
	-10, -10, -11, -11, -11, -12, -12, -12, -13, -13, -13, -14, -14, -14, -15, -15, 
	-16, -16, -16, -17, -17, -17, -18, -18, -18, -19, -19, -19, -20, -20, -20, -21, 
	-21, -21, -22, -22, -22, -23, -23, -23, -24, -24, -24, -25, -25, -25, -26, -26, 
	-27, -27, -27, -28, -28, -28, -29, -29, -29, -30, -30, -30, -31, -31, -31, -32, 
	-32, -32, -33, -33, -33, -34, -34, -34, -35, -35, -35, -36, -36, -37, -37, -37, 
	-38, -38, -38, -39, -39, -39, -40, -40, -40, -41, -41, -41, -42, -42, -42, -43
};
static const Pel dFAC_GV[256] = {
	91, 91, 90, 89, 89, 88, 87, 86, 86, 85, 84, 84, 83, 82, 81, 81, 
	80, 79, 79, 78, 77, 76, 76, 75, 74, 74, 73, 72, 71, 71, 70, 69, 
	69, 68, 67, 66, 66, 65, 64, 64, 63, 62, 61, 61, 60, 59, 59, 58, 
	57, 56, 56, 55, 54, 54, 53, 52, 51, 51, 50, 49, 49, 48, 47, 46, 
	46, 45, 44, 44, 43, 42, 41, 41, 40, 39, 39, 38, 37, 36, 36, 35, 
	34, 34, 33, 32, 31, 31, 30, 29, 29, 28, 27, 26, 26, 25, 24, 24, 
	23, 22, 21, 21, 20, 19, 19, 18, 17, 16, 16, 15, 14, 14, 13, 12, 
	11, 11, 10, 9, 9, 8, 7, 6, 6, 5, 4, 4, 3, 2, 1, 1, 
	0, 0, 0, -1, -2, -3, -3, -4, -5, -5, -6, -7, -8, -8, -9, -10, 
	-10, -11, -12, -13, -13, -14, -15, -15, -16, -17, -18, -18, -19, -20, -20, -21, 
	-22, -23, -23, -24, -25, -25, -26, -27, -28, -28, -29, -30, -30, -31, -32, -33, 
	-33, -34, -35, -35, -36, -37, -38, -38, -39, -40, -40, -41, -42, -43, -43, -44, 
	-45, -45, -46, -47, -48, -48, -49, -50, -50, -51, -52, -53, -53, -54, -55, -55, 
	-56, -57, -58, -58, -59, -60, -60, -61, -62, -63, -63, -64, -65, -65, -66, -67, 
	-68, -68, -69, -70, -70, -71, -72, -73, -73, -74, -75, -75, -76, -77, -78, -78, 
	-79, -80, -80, -81, -82, -83, -83, -84, -85, -85, -86, -87, -88, -88, -89, -90
};
#endif

// TComPicYuv *pcPicYuv := the YUV picture to be converted
// pRGB := start of the bitmap (RGBA) pixel data
// 
void NcDecoderYUV2RGB(TComPicYuv *pcPicYuv, byte *pRGB, Int width, Int height)
{
	if ( NULL == pRGB )
		return;
	Pel *y = pcPicYuv->getLumaAddr(),	// start address of the Y plane of the reconstructed picture
		*u = pcPicYuv->getCbAddr(),		// start address of the Cb plane
		*v = pcPicYuv->getCrAddr();		// start address of the Cr plane
//	byte *pRGB = (byte *)lpDiBits;
	const Int dY = pcPicYuv->getStride();	// luma stride: offset from a Y sample to the same column in the next row
	const Int dRGB = width << 2;		// bitmap stride in bytes (4 bytes per pixel)
	const Int dY2 = dY + 1, dRGB2 = dRGB + 4;
	const Int d2Y = (dY << 1) - width;	// correction applied to the Y pointer after each pair of rows
	const Int dUV = pcPicYuv->getCStride() - width / 2;	// correction applied to the U/V pointers after each chroma row
	Pel dB, dG, dR, Cb, Cr, y0, y1, y2, y3;
	Int i = 0, j = 0;
	height >>= 1 ;	width >>= 1 ;	// halve both counts: with 4:2:0 chroma each U/V sample covers a 2x2 block of luma samples
	for ( j = 0; j < height; j++ )
	{
		for ( i = 0; i < width; i++ )
		{
			// ARGB_8888: the four bytes in memory are ordered B, G, R, A (verified; A must be 255 for opaque)
		/*	
			dR = 1.13983 * ( *v - 128 );
			dG = -0.39465 * ( *u - 128 ) - 0.58060 * ( *v - 128 );
			dB = 2.03211 * ( *u - 128 );
		*/	// or
		
			// a fast mode using shifting.
			y0 = *y;		y1 = *(y+1);
			y2 = *(y+dY);	y3 = *(y+dY2);
#ifdef FAST_TABLE_SEARCH_YUV2RGB
			// the table lookup is fastest, but compared with the shift arithmetic below the gain is only about 2%
			Cb = *u;	Cr = *v;
			dB = dFAC_B[Cb];
			dR = dFAC_R[Cr];
			dG = dFAC_GU[Cb] + dFAC_GV[Cr];
#else
			Cb = *u - 128;	Cr = *v - 128;
			#if 1
			// measurements show that of the three variants below this one is fastest:
			// reusing dR as a temporary saves a few shift/add operations
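			// the shifts approximate the usual conversion coefficients:
			//   1.772 ~ 1 + 1/2 + 1/4 + 1/64     = 1.765625   (dB)
			//   1.402 ~ 1 + 1/4 + 1/8 + 1/32     = 1.40625    (dR)
			//   0.344 ~ 1/4 + 1/16 + 1/32        = 0.34375    (dG, Cb term)
			//   0.714 ~ 1/2 + 1/8 + 1/16 + 1/32  = 0.71875    (dG, Cr term)
			// and (dR>>1) + (Cr>>5) is (approximately) the same set of Cr terms as in the second variant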
			dB = Cb + (Cb>>1) + (Cb>>2) + (Cb>>6);
			dR = Cr + (Cr>>2) + (Cr>>3);
			dG = - ((Cb>>2) + (Cb>>4) + (Cb>>5) ) - ((dR>>1) + (Cr>>5));	// - ((Cr>>1) + (Cr>>3) + (Cr>>4) + (Cr>>5))
			dR += (Cr>>5);	// Cr + (Cr>>2) + (Cr>>3) + (Cr>>5);
			#elif 1
			// this variant is second fastest
			dB = Cb + (Cb>>1) + (Cb>>2) + (Cb>>6);
			dG = - ((Cb>>2) + (Cb>>4) + (Cb>>5) ) - ((Cr>>1) + (Cr>>3) + (Cr>>4) + (Cr>>5));
			dR = Cr + (Cr>>2) + (Cr>>3) + (Cr>>5);
			#else
			// plain floating-point multiplication is the slowest
			dR = 1.402* Cr;
			dG = -0.344* Cb - 0.714*Cr;
			dB = 1.772 * Cb;
			#endif
#endif
			*(  pRGB) = clip(y0 + dB);		// B
			*(pRGB+ 4 ) = clip(y1 + dB);
			*(pRGB+dRGB) = clip(y2 + dB);
			*(pRGB+dRGB2) = clip(y3 + dB);
			*(++pRGB) = clip(y0 + dG);	// G
			*(pRGB+ 4 ) = clip(y1 + dG);
			*(pRGB+dRGB) = clip(y2 + dG);
			*(pRGB+dRGB2) = clip(y3 + dG);
			*(++pRGB) = clip(y0 + dR);	// R
			*(pRGB+ 4 ) = clip(y1 + dR);
			*(pRGB+dRGB) = clip(y2 + dR);
			*(pRGB+dRGB2) = clip(y3 + dR);
			pRGB += 6;	// two RGBA pixels of this row are done; together with the two ++ above the pointer advances 8 bytes in total
			// the A channel was already set when the buffer was initialized, so it is not touched here (2013-3-24)
			y+=2;
			u++;	v++;	// chroma advances only every second luma column, since each U/V sample covers a 2x2 block
		}	// for loop (i < width)
		pRGB += dRGB;	// skip one bitmap row: the loop above writes two rows at a time (a 2x2 block per iteration)
		y += d2Y;		// move Y down to the start of the next row pair
		u += dUV;		// move U to the next chroma row
		v += dUV;		// move V to the next chroma row
	}	// for loop (j < height)
}

#else
void NcDecoderYUV2RGB(TComPicYuv *pcPicYuv, byte *pRGB, int width, int height)
{
	if ( NULL == pRGB )
		return;		
	Pel *y = pcPicYuv->getLumaAddr(),	// start address of the Y plane of the reconstructed picture
		*u = pcPicYuv->getCbAddr(),		// start address of the Cb plane
		*v = pcPicYuv->getCrAddr();		// start address of the Cr plane
//	UInt *p = lpDiBits;
	UInt dy = pcPicYuv->getStride();		// luma stride: offset from a Y sample to the same column in the next row
	UInt dUV = (pcPicYuv->getCStride() - width / 2);	// correction applied to the U/V pointers after a chroma row
	Int dR = 0, dG = 0, dB = 0;
	for ( int j = 0; j < height; j++ )
	{
		for ( int i = 0; i < width; i+=2 )
		{
			// ARGB_8888: the four bytes in memory are ordered B, G, R, A (verified; A must be 255 for opaque)
#if 1
			dR = 1.402*(*v-128);
			dG = -0.344*(*u-128) - 0.714*(*v-128);
			dB = 1.772 * ( *u - 128 );
#else
			dR = 1.13983 * ( *v - 128 );
			dG = -0.39465 * ( *u - 128 ) - 0.58060 * ( *v - 128 );
			dB = 2.03211 * ( *u - 128 );
#endif	
			*pRGB++ = clip(*y + dB);	// B
			*pRGB++ = clip(*y + dG);	// G
			*pRGB++ = clip(*y + dR);	// R
			*pRGB++; // = 255;		// A
			y++;
			*pRGB++ = clip(*y + dB);	// B
			*pRGB++ = clip(*y + dG);	// G
			*pRGB++ = clip(*y + dR);	// R
			*pRGB++;	//  = 255;		// A
			// test code below: verified on 2013-3-14 that, reading the bytes from low to high address, the order is B, G, R, A
	/*		*pRGB++ = 255;	*pRGB++ = 0;	*pRGB++ = 0;	*pRGB++ = 128;		y++;
			*pRGB++ = 255;	*pRGB++ = 0;	*pRGB++ = 0;	*pRGB++ = 128;
			// handling whole 32-bit pixels at a time actually turned out slower
			*p++ = ((UInt( *y + 1.772 * *u - 0xE2) & 0xff) << 0)	// B
				 | ((UInt( *y - 0.34413 * *u - 0.71414 * *v + 0x87) & 0xff) << 8)	// G
				 | ((UInt( *y + 1.402 * *v  - 0xAB) & 0xff) << 16 )	// R
				 | 0xff000000;
			y++;
			*p++ = ((UInt( *y + 1.772 * *u - 0xE2) & 0xff) << 0)	// B
				 | ((UInt( *y - 0.34413 * *u - 0.71414 * *v + 0x87) & 0xff) << 8)	// G
				 | ((UInt( *y + 1.402 * *v - 0xAB) & 0xff) << 16 )	// R
				 | 0xff000000;
*/				//		*p++ = 0xff0000ff;	y++;	*p++ = 0xff0000ff;
			y++;
			u++;	v++;	// chroma advances only every second luma column, since each U/V sample covers a 2x2 block
		}
		y += ( pcPicYuv->getStride() - width);	// move Y to the start of the next row
		if ( j % 2 == 0 )
		{
			u -= width >> 1;		v -= width >> 1;		// even rows: rewind so the same chroma row is reused
		}
		else
		{
			// odd rows (counting from 0): only now do U/V move on to the next chroma row
			u += dUV;	// chroma pointer correction
			v += dUV;	// chroma pointer correction
		}
	}	// for loop (j < height)
}

#endif


bool CNcMHevcDecoder::GetNextFrame(void)
{
//	LOGE(">>>>0. CNcMHevcDecoder::GetNextFrame()");
	static long tm = 0;
	int tId = DecodeFrames();
	last_tId = -2;		// reset so that the next call to DecodeFrames() actually decodes a new frame
	if ( NULL == pcListPic )
		return false;
	TComList<TComPic*>::iterator iterPic = pcListPic->begin();
	Int not_displayed = 0;
	assert( lpDiBits != NULL );		// the pixel buffer must have been handed over via GetPixelsBuffer() first

	while (iterPic != pcListPic->end())
	{
		TComPic* pcPic = *(iterPic);
		if(pcPic->getOutputMark() && pcPic->getPOC() > m_iPOCLastDisplay)
			not_displayed++;
		iterPic++;
	}
	if ( 0 == not_displayed && -1 == tId )
	{
		LOGE(">>>>>>>> HM software <<<<<<<<");
		LOGW("Decoder version [%s] %s", NV_VERSION, NVM_ONOS);
		LOGW(NVM_BITS);
		LOGW(NVM_COMPILEDBY);
		return false;
	}
	while (1)
	{
		iterPic   = pcListPic->begin();
		while (iterPic != pcListPic->end())
		{
			TComPic* pcPic = *(iterPic);		
		//	LOGI(">>>>1. CNcMHevcDecoder::GetNextFrame() not_displayed = %d, %d, %d, %d",
		//		not_displayed, pcPic->getNumReorderPics(tId), pcPic->getPOC(), m_iPOCLastDisplay );
			// if (  pcPic->getOutputMark() && (not_displayed >  pcPic->getNumReorderPics(tId) && pcPic->getPOC() > m_iPOCLastDisplay))
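			// output rule: a picture is displayed once enough later pictures have been decoded
			// (not_displayed exceeds its reorder delay for this temporal layer) and its POC is
			// later than the last picture already displayed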
			if ( pcPic->getOutputMark() && not_displayed > pcPic->getNumReorderPics(tId) && pcPic->getPOC() > m_iPOCLastDisplay )
			{
				// write to file
				not_displayed--;
			//	LOGI(">>>>1.0 CNcMHevcDecoder::GetNextFrame() YUV to RGB");
				clock_t t = clock();
			#if 0
				NcDecoderYUV2RGB( pcPic->getPicYuvRec(), (byte *)lpDiBits, width, height);
			#else
				NcDecoderYUV2RGB( 
					pcPic->getPicYuvRec()->getLumaAddr(),	// y
					pcPic->getPicYuvRec()->getCbAddr(),		// u
					pcPic->getPicYuvRec()->getCrAddr(),		// v
					pcPic->getStride(), pcPic->getCStride(),	// strides
				 	lpDiBits, width, height);	// bmp 
			#endif
				tm += clock() - t;
				LOG("tm(YUV2RGB) = %5d.%03d", tm / 1000, tm % 1000);
				poc++;
			//	LOGI(">>>>1.1. poc %03d decoded. not_displayed = %d", poc, not_displayed);
				/*
				if ( m_pchReconFile )
				{
					CroppingWindow &crop = pcPic->getCroppingWindow(g);
					m_cTVideoIOYuvReconFile.write( pcPic->getPicYuvRec(), crop.getPicCropLeftOffset(), crop.getPicCropRightOffset(), crop.getPicCropTopOffset(), crop.getPicCropBottomOffset() );
				}

				// update POC of display order
				*/
				m_iPOCLastDisplay = pcPic->getPOC();
				// erase non-referenced picture in the reference picture list after display
				pcPic->setOutputMark(false);
				return true;
			}	// if not displayed > ...
			// release pictures that have already been output and are no longer referenced
			if ( !pcPic->getSlice(0)->isReferenced() && pcPic->getOutputMark() == false )
			{
			//	LOGI(">>>>1.2. poc %03d destroyed", poc);
				pcPic->destroy();
				pcListPic->erase( iterPic );
				iterPic = pcListPic->begin();
			}
			else
			{
				iterPic++;	// next decoded frame.			
			}
		} // while (iterPic != pcListPic->end())
		// decode one more frame so that the next picture due for display becomes available;
		// this guarantees the while(1) loop is eventually left
		tId = DecodeFrames();
		last_tId = -2;
	}	// while ( 1 )
//	LOGI(">>>>2. CNcMHevcDecoder::GetNextFrame() Finished");
//	LOGE( ">>>> %d", tId != -1 );
	return false;
}
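
On the Java side the three native calls would be driven roughly as follows. This is only a usage sketch: the buffer size, the Bitmap handling and the assumption that the int[] stays pinned (so pixels written on the native side are visible without a copy-back) are assumptions, not something the code above enforces. The native side does write the frame width and height into the first two elements of the array before any pixels are decoded.

import android.graphics.Bitmap;

import com.NcHevc.NcHevcDecoder;

public class PlayerLoop {
	// hypothetical render loop built on the NcHevcDecoder class sketched after the JNI header
	public void play(String path) {
		NcHevcDecoder dec = new NcHevcDecoder();
		if (!dec.Open(path)) {
			return;						// Open() already logs the failure on the native side
		}
		int[] pixels = new int[416 * 240];			// sized for the 416x240 test clip; a real player must match the stream
		dec.GetPixelsBuffer(pixels);				// the native side keeps a pointer into this array
		int w = pixels[0], h = pixels[1];			// frame size is reported in the first two ints
		Bitmap bmp = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
		while (dec.DecodeFrame()) {				// fills 'pixels' with one ARGB frame
			bmp.setPixels(pixels, 0, w, 0, 0, w, h);	// only valid if the int[] was pinned, not copied, by JNI
			// ... draw bmp onto a SurfaceView / ImageView here ...
		}
	}
}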

 

PS: You are welcome to visit my CSDN blog: http://blog.csdn.net/luofl1992
