进行Android NDK开发,最好用的还是Eclipse.新版本由于存在找不到头文件的问题,博主之前一直使用Eclipse Indigo. 使用下面的解决方法后,Juno,Kepler,Luna都正常了。

以下摘录自stackoverflow

原帖地址:http://stackoverflow.com/questions/23122934/eclipse-adt-unresolved-inclusion-jni-h

I ran into a similar problem with a working project with Android NDK-based code after updating to Eclipse Kepler. I observed similar things: the header files would correctly be listed under “includes” in the project, the actual build (via ndk-build) worked fine, but Eclipse’s editor couldn’t locate any headers in standard system directories (all headers with < > brackets).继续阅读

原创文章,转载请注明: 转载自贝壳博客

本文链接地址: [转]解决更新Eclipse后,Android NDK开发找不到头文件的问题

最近有个海康DVR转rtmp的项目,准备用openh264试试,相比x264性能非常强,但相同码率下画质不如x264

/**
 * Hikvision PlayM4 decode callback.
 *
 * Called by the SDK for every decoded frame. Audio frames are only logged;
 * YV12 video frames are scaled to the output resolution with libyuv and
 * encoded to H.264 with openh264, the bitstream being appended to hp->file.
 *
 * @param nPort      PlayM4 port number (unused here).
 * @param pBuf       Decoded frame data (YV12 pixels for video).
 * @param nSize      Size of pBuf in bytes.
 * @param pFrameInfo Frame metadata (type, dimensions, timestamp, fps).
 * @param nUser      User pointer registered with the SDK; carries the
 *                   per-channel hik_player_t context.
 * @param nReserved2 Reserved by the SDK (unused).
 *
 * NOTE(review): this listing was recovered from an HTML-mangled blog dump:
 * "&param" had been eaten down to "¶m", "\n"/"\\" escapes were stripped,
 * and the NAL write loop was truncated. The loop below is reconstructed
 * from the surviving remnant and the standard openh264 SFrameBSInfo layout
 * (each layer's NALs are contiguous in pBsBuf, sized by pNalLengthInByte[]).
 */
void CALLBACK DecCBFun(long nPort,char * pBuf,long nSize,FRAME_INFO * pFrameInfo, long nUser,long nReserved2)
{
    hik_player_t *hp = (hik_player_t *)nUser;
    long lFrameType = pFrameInfo->nType;

    if (lFrameType == T_AUDIO16)
    {
        // BUG FIX: format string ended in a literal 'n' (stripped backslash).
        // The third value printed is the frame rate, not pFrameInfo itself.
        printf("Audio nStamp:%d  size:%d frameRate:%d\n",
               pFrameInfo->nStamp, nSize, pFrameInfo->nFrameRate);
    }
    else if (lFrameType == T_YV12)
    {
        // One-time lazy initialization on the first video frame, once the
        // real input dimensions are known.
        if (hp->in_width == 0 || hp->in_height == 0) {
            hp->in_width  = pFrameInfo->nWidth;
            hp->in_height = pFrameInfo->nHeight;
            avpicture_alloc(&hp->in_yuv,  AV_PIX_FMT_YUV420P, hp->in_width,  hp->in_height);
            avpicture_alloc(&hp->out_yuv, AV_PIX_FMT_YUV420P, hp->out_width, hp->out_height);
            WelsCreateSVCEncoder(&hp->encoder_);

            // Initialize the encoder with extended parameters.
            SEncParamExt param;
            // BUG FIX: was memset(&param, 0, sizeof(SEncParamBase)), which
            // zeroed only the leading SEncParamBase-sized prefix and left
            // the rest of SEncParamExt uninitialized.
            memset(&param, 0, sizeof param);

            hp->encoder_->GetDefaultParams(&param);
            param.iUsageType    = CAMERA_VIDEO_REAL_TIME;
            param.fMaxFrameRate = pFrameInfo->nFrameRate;
            param.iPicWidth     = hp->out_width;
            param.iPicHeight    = hp->out_height;
            param.iTargetBitrate = hp->out_bitrate;
            param.iRCMode                    = RC_QUALITY_MODE;
            param.iTemporalLayerNum          = 1;
            param.iSpatialLayerNum           = 1;
            param.bEnableDenoise             = 0;
            param.bEnableBackgroundDetection = 1;
            param.bEnableAdaptiveQuant       = 1;
//            param.bEnableFrameSkip           = 0;
            param.bEnableLongTermReference   = 0;
            param.iLtrMarkPeriod             = 30;
            // Force an IDR roughly every two seconds.
            param.uiIntraPeriod              = (unsigned int)pFrameInfo->nFrameRate * 2;
            // CONSTANT_ID: SPS/PPS ids stay fixed, so the parameter sets can
            // be sent once and skipped afterwards (see have_spspps below).
            param.eSpsPpsIdStrategy          = CONSTANT_ID;
            param.bPrefixNalAddingCtrl       = 0;
            // Single spatial layer mirrors the top-level settings.
            param.sSpatialLayers[0].iVideoWidth        = param.iPicWidth;
            param.sSpatialLayers[0].iVideoHeight       = param.iPicHeight;
            param.sSpatialLayers[0].fFrameRate         = param.fMaxFrameRate;
            param.sSpatialLayers[0].iSpatialBitrate    = param.iTargetBitrate;
            param.sSpatialLayers[0].iMaxSpatialBitrate = param.iMaxBitrate;

            hp->encoder_->InitializeExt(&param);
//            int g_LevelSetting = 0;
//            hp->encoder_->SetOption(ENCODER_OPTION_TRACE_LEVEL, &g_LevelSetting);
            int videoFormat = videoFormatI420;
            hp->encoder_->SetOption(ENCODER_OPTION_DATAFORMAT, &videoFormat);

            // BUG FIX: path literal was "d:\out.h264" — '\o' is not a valid
            // escape; the backslash must itself be escaped.
            hp->file = fopen("d:\\out.h264", "wb+");
        }
        //printf("Video nStamp:%d size:%d width:%d height:%d fps:%d\n",pFrameInfo->nStamp,nSize,pFrameInfo->nWidth,pFrameInfo->nHeight,pFrameInfo->nFrameRate);

        // Wrap the decoded frame and scale it to the output resolution.
        avpicture_fill(&hp->in_yuv, (const uint8_t*)pBuf, AV_PIX_FMT_YUV420P, hp->in_width, hp->in_height);
        libyuv::I420Scale(hp->in_yuv.data[0],  hp->in_yuv.linesize[0],
                          hp->in_yuv.data[1],  hp->in_yuv.linesize[1],
                          hp->in_yuv.data[2],  hp->in_yuv.linesize[2],
                          hp->in_width,  hp->in_height,
                          hp->out_yuv.data[0], hp->out_yuv.linesize[0],
                          hp->out_yuv.data[1], hp->out_yuv.linesize[1],
                          hp->out_yuv.data[2], hp->out_yuv.linesize[2],
                          hp->out_width, hp->out_height, libyuv::kFilterNone);

        SFrameBSInfo info;
        memset(&info, 0, sizeof (SFrameBSInfo));
        SSourcePicture pic;
        memset(&pic, 0, sizeof (SSourcePicture));
        pic.iPicWidth    = hp->out_width;
        pic.iPicHeight   = hp->out_height;
        pic.iColorFormat = videoFormatI420;
        pic.iStride[0] = hp->out_yuv.linesize[0];
        pic.iStride[1] = pic.iStride[2] = hp->out_yuv.linesize[1];
        pic.pData[0] = hp->out_yuv.data[0];
        pic.pData[1] = hp->out_yuv.data[1];
        pic.pData[2] = hp->out_yuv.data[2];

        int ret = hp->encoder_->EncodeFrame(&pic, &info);
        // BUG FIX: ret was stored but never checked; don't touch info on failure.
        if (ret != cmResultSuccess) {
            return;
        }

        if (info.eFrameType != videoFrameTypeSkip) {
            // With CONSTANT_ID the SPS/PPS layers only need to be written
            // once; afterwards only the last layer (the slice data) matters.
            int first_layer = 0;
            if (hp->have_spspps) {
                first_layer = info.iLayerNum - 1;
            } else {
                hp->have_spspps = true;
            }
            // Reconstructed NAL write loop: each layer's NAL units are laid
            // out back-to-back in pBsBuf; pNalLengthInByte[j] gives the size
            // of the j-th unit.
            for (int i = first_layer; i < info.iLayerNum; i++) {
                int pos = 0;
                for (int j = 0; j < info.sLayerInfo[i].iNalCount; j++) {
                    fwrite(info.sLayerInfo[i].pBsBuf + pos, 1,
                           info.sLayerInfo[i].pNalLengthInByte[j], hp->file);
                    pos += info.sLayerInfo[i].pNalLengthInByte[j];
                }
            }
        }

        /*
         if (encoder_) {
            encoder_->Uninitialize();
            WelsDestroySVCEncoder (encoder_);
         }
         */
    }
    else
    {
        // Other frame types (e.g. composite audio) are ignored.
    }
}

原创文章,转载请注明: 转载自贝壳博客

本文链接地址: 一段海康SDK解码回调后缩放并用openh264编码的代码

目前我使用的编译链
gcc: TDM-GCC-4.9.2
msys: 从MinGW中拷贝而来

./configure --target-os=win32 --arch=i686

windows下编译ffmpeg 2.6.1

windows下编译ffmpeg 2.6.1

开启NVENC硬件编码H.264
--enable-encoder=nvenc --enable-nvenc --enable-nonfree

ffmpeg 支持 nvenc 硬编码 H264

ffmpeg 支持 nvenc 硬编码 H264


继续阅读

原创文章,转载请注明: 转载自贝壳博客

本文链接地址: 如何在windows中编译ffmpeg 2.6.1 以及 NVENC硬编码的尝试