Contents
usecases
pipeline
node
UsecaseAuto::Initialize
1. Initialize metadata Manager and initialize input client
2. Get the default matching usecase for the stream combination
3. Create pipeline and assign all pipeline parameters
4. Create session
5. Register metadata clients
6. Get extension module instance for pipeline activation: pExtensionModuleInstance->ActivatePipeline
UsecaseSelector::DefaultMatchingUsecaseSelection: fetch the usecase array for the matching usecase ID
1. Check the resolution
1) Get the usecase's streamConfigMode
2) From the usecase's pChiUsecases, check the resolution in the ChiTarget BufferDimension of UsecaseAuto_TARGET_BUFFER_RAW1_target in the UsecaseAuto_Targets array, then compare the ChiStreamFormat list UsecaseAuto_TARGET_BUFFER_RAW1_formats against pStreamConfig to decide which format to use
2. If there are raw streams, enable PruneRawTargetStrings
3. For YUV streams, enable PruneYUVTargetStrings
4. For UBWCTP10 streams, enable PruneUBWCTp10TargetStrings
5. For preview streams, enable PrunePreviewTargetStrings
6. Check whether m_enableAutoNoIPE is configured
7. Check whether scaling/cropping is enabled
8. UsecaseMatches(g_UsecaseNameToUsecaseInstanceMap.at("g_pUsecaseAuto"))
Resolve g_SocIdNameToFunctionPointerMap to obtain the pSelectedUsecase array
Pipeline created successfully. Nodes: BPS, IPE, JPEG, JPEG Aggregator; Links
Pipeline::CreateDescriptor
1. Initialize m_pipelineDescriptor and pipelineCreateData
2. Parse the node info in each ChiNode
1. pNodeProperties[i].pValues stores the algorithms the node supports, e.g. com.qti.stats.pdlibwrapper, com.qti.hvx.addconstant
2. Get the HDR mode info m_HDRInfo[logicalCameraId]
3. For no-buffer usecases (torch widget, AON), IsNoBufferUsecase = TRUE
4. The current scenario uses neither torch widget nor AON; for the auto usecase IsNoBufferUsecase = FALSE
5. Get the frame rate and determine whether HDR mode is supported
6. Get m_cameraCaps.numSensorModes and pSensorModeInfo from pLogicalCameraInfo
7. Determine from pLogicalCameraInfo which HDR mode is supported
8. Determine whether cropping is supported; set the resolution according to the configured sensor mode
usecases
apps/qnx_ap/AMSS/multimedia/qcamera/camera_qcx/cdk_qcx/oem/qcom/topology/titan/sa8650/
sa8650_usecase.xml
pipeline
apps/qnx_ap/AMSS/multimedia/qcamera/camera_qcx/cdk_qcx/oem/qcom/topology/titan/usecase-components/usecases/UsecaseAuto/pipelines
camxAutoYUV.xml
References the RealTimeBaseAuto & StatsSegmentAuto node segments
node
segments/sa8650/
RealTimeBaseAuto.xml
com.qti.hvx.addconstant & com.qti.stats.pdlibwrapper algorithms
m_platformID is obtained during ExtensionModule initialization when CamX loads:
ExtensionModule::ExtensionModule()
m_platformID = SocUtils::GetSocId();
ChiPopulatePipelineData pFuncPopulatePipelineData =
reinterpret_cast<ChiPopulatePipelineData>(ChxUtils::LibGetAddr(m_chiUsecaseHandle, "PopulatePipelineData"));
if (NULL != pFuncPopulatePipelineData)
{
pFuncPopulatePipelineData(m_platformID);
}
else
{
CHX_LOG_ERROR("Failed to load PopulatePipelineData lib");
}
/// PopulatePipelineData
///
/// @brief Populate the global map variable with correct data from usecase xml generated file based on socId
///
/// @param socID [IN] SocId for current target
///
/// @return None
VOID UsecaseSelector::PopulatePipelineData(SocId socId)
{
OSLIBRARYHANDLE handle = ExtensionModule::GetInstance()->GetChiUsecaseSelectorHandle();
ChiPopulatePipelineData pFuncPopulatePipelineData =
reinterpret_cast<ChiPopulatePipelineData>(ChxUtils::LibGetAddr(handle, "PopulatePipelineData"));
if (NULL != pFuncPopulatePipelineData)
{
pFuncPopulatePipelineData(socId);
}
else
{
CHX_LOG_ERROR("Failed to load pFuncPopulatePipelineData lib");
}
}
Fetch the per-platform fill function from g_SocIdNameToFunctionPointerMap:
// PopulatePipelineData
extern "C" CAMX_VISIBILITY_PUBLIC VOID PopulatePipelineData(
SocId socId)
{
pFunc pPopulateUseCaseInfo = NULL;
FillMapdata();
switch (socId)
{
case SocId::SM8450:
pPopulateUseCaseInfo = g_SocIdNameToFunctionPointerMap.at("sm8450");
break;
case SocId::SA8650P:
pPopulateUseCaseInfo = g_SocIdNameToFunctionPointerMap.at("sa8650");
break;
case SocId::SA8630P:
pPopulateUseCaseInfo = g_SocIdNameToFunctionPointerMap.at("sa8630");
break;
default:
break;
}
if (NULL != pPopulateUseCaseInfo)
{
pPopulateUseCaseInfo();
}
else
{
CHX_LOG_ERROR("Error Failed to populate pipleine data");
}
}
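FillMapdata() is referenced above but not shown. Presumably it registers one generated fill function per SoC name, which the switch then resolves. A minimal, self-contained sketch under that assumption; the map type and the stub registrations here are illustrative, not the real generated code:
#include <map>
#include <string>

typedef void (*pFunc)();  // matches the pFunc used by PopulatePipelineData above

// Per-SoC fill functions; stubs here (the real FillMapssa8650 is excerpted below)
static void FillMapssm8450() { /* fill sm8450 tables */ }
static void FillMapssa8650() { /* fill sa8650 tables, see g_pipelines.cpp excerpt */ }
static void FillMapssa8630() { /* fill sa8630 tables */ }

static std::map<std::string, pFunc> g_SocIdNameToFunctionPointerMap;

void FillMapdata()
{
    g_SocIdNameToFunctionPointerMap["sm8450"] = FillMapssm8450;
    g_SocIdNameToFunctionPointerMap["sa8650"] = FillMapssa8650;
    g_SocIdNameToFunctionPointerMap["sa8630"] = FillMapssa8630;
}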
cdk_qcx/oem/qcom/chiusecase/common/g_pipelines.cpp
void FillMapssa8650()
{
g_EnumNameToEnumValueMap["InvalidVariantGroup"] = sa8650::InvalidVariantGroup;
g_EnumNameToEnumValueMap["PruneGroupAEC"] = sa8650::PruneGroupAEC;
g_EnumNameToEnumValueMap["PruneGroupAWB"] = sa8650::PruneGroupAWB;
g_EnumNameToEnumValueMap["PruneGroupNoIPE"] = sa8650::PruneGroupNoIPE;
g_EnumNameToEnumValueMap["PruneGroupP010Format"] = sa8650::PruneGroupP010Format;
g_EnumNameToEnumValueMap["PruneGroupP010LSBFormat"] = sa8650::PruneGroupP010LSBFormat;
g_EnumNameToEnumValueMap["PruneGroupP01208Format"] = sa8650::PruneGroupP01208Format;
g_EnumNameToEnumValueMap["PruneGroupP01208LSBFormat"] = sa8650::PruneGroupP01208LSBFormat;
g_EnumNameToEnumValueMap["PruneGroupP01210Format"] = sa8650::PruneGroupP01210Format;
g_EnumNameToEnumValueMap["PruneGroupP01210LSBFormat"] = sa8650::PruneGroupP01210LSBFormat;
g_EnumNameToEnumValueMap["PruneGroupPlain16_12"] = sa8650::PruneGroupPlain16_12;
g_EnumNameToEnumValueMap["PruneGroupPreview"] = sa8650::PruneGroupPreview;
g_EnumNameToEnumValueMap["PruneGroupPreview2"] = sa8650::PruneGroupPreview2;
g_EnumNameToEnumValueMap["PruneGroupPreview3"] = sa8650::PruneGroupPreview3;
g_EnumNameToEnumValueMap["PruneGroupRGBIFormat"] = sa8650::PruneGroupRGBIFormat;
g_EnumNameToEnumValueMap["PruneGroupRGBPFormat"] = sa8650::PruneGroupRGBPFormat;
g_EnumNameToEnumValueMap["PruneGroupRaw1"] = sa8650::PruneGroupRaw1;
g_EnumNameToEnumValueMap["PruneGroupRaw16"] = sa8650::PruneGroupRaw16;
g_EnumNameToEnumValueMap["PruneGroupRaw2"] = sa8650::PruneGroupRaw2;
g_EnumNameToEnumValueMap["PruneGroupRaw3"] = sa8650::PruneGroupRaw3;
g_EnumNameToEnumValueMap["PruneGroupRaw4"] = sa8650::PruneGroupRaw4;
g_EnumNameToEnumValueMap["PruneGroupVideo"] = sa8650::PruneGroupVideo;
g_EnumNameToEnumValueMap["PruneGroupYuv"] = sa8650::PruneGroupYuv;
g_EnumNameToEnumValueMap["PruneGroupYuv2"] = sa8650::PruneGroupYuv2;
g_EnumNameToEnumValueMap["PruneGroupYuv3"] = sa8650::PruneGroupYuv3;
g_StringArrayNameToStringArray["g_stringMapVariantGroup"] = sa8650::g_stringMapVariantGroup;
g_EnumNameToEnumValueMap["g_stringMapVariantGroupSize"] = sizeof(sa8650::g_stringMapVariantGroup) / sizeof(sa8650::g_stringMapVariantGroup[0]);
g_EnumNameToEnumValueMap["InvalidVariantType"] = sa8650::InvalidVariantType;
g_EnumNameToEnumValueMap["PruneTypeDisabled"] = sa8650::PruneTypeDisabled;
g_EnumNameToEnumValueMap["PruneTypeEnabled"] = sa8650::PruneTypeEnabled;
g_StringArrayNameToStringArray["g_stringMapVariantType"] = sa8650::g_stringMapVariantType;
g_EnumNameToEnumValueMap["g_stringMapVariantTypeSize"] = sizeof(sa8650::g_stringMapVariantType) / sizeof(sa8650::g_stringMapVariantType[0]);
g_EnumNameToEnumValueMap["ICMSMono"] = sa8650::ICMSMono;
g_EnumNameToEnumValueMap["RawAuto"] = sa8650::RawAuto;
g_EnumNameToEnumValueMap["AutoNoIPE"] = sa8650::AutoNoIPE;
g_EnumNameToEnumValueMap["AutoICMS3"] = sa8650::AutoICMS3;
g_EnumNameToEnumValueMap["AutoICMS4"] = sa8650::AutoICMS4;
g_EnumNameToEnumValueMap["AutoICMS"] = sa8650::AutoICMS;
g_EnumNameToEnumValueMap["AutoICMS2"] = sa8650::AutoICMS2;
g_EnumNameToEnumValueMap["AutoFFCChiNode"] = sa8650::AutoFFCChiNode;
g_EnumNameToEnumValueMap["AutoFFCCVSOC"] = sa8650::AutoFFCCVSOC;
g_EnumNameToEnumValueMap["AutoSRV"] = sa8650::AutoSRV;
g_EnumNameToEnumValueMap["AutoFFC"] = sa8650::AutoFFC;
g_EnumNameToEnumValueMap["AutoFFCMeta"] = sa8650::AutoFFCMeta;
g_EnumNameToEnumValueMap["AutoOfflineIFE"] = sa8650::AutoOfflineIFE;
g_EnumNameToEnumValueMap["AutoYUVROI"] = sa8650::AutoYUVROI;
g_EnumNameToEnumValueMap["Auto"] = sa8650::Auto;
g_EnumNameToEnumValueMap["AutoYUV"] = sa8650::AutoYUV;
g_UsecaseCollectionNameToUsecaseCollectionMap["PerNumTargetUsecases"] =sa8650::PerNumTargetUsecases;
g_EnumNameToEnumValueMap["PerNumTargetUsecasesSize"] = sizeof(sa8650::PerNumTargetUsecases) / sizeof(sa8650::PerNumTargetUsecases[0]);
g_EnumNameToEnumValueMap["UsecaseICMSMonoId"] = sa8650::UsecaseICMSMonoId;
g_UsecaseNameToUsecaseInstanceMap["g_pUsecaseICMSMono"] = &(sa8650::Usecases1Target[sa8650::UsecaseICMSMonoId]);
g_UsecaseNameToUsecaseInstanceMap["Usecases1Target"] = sa8650::Usecases1Target;
g_EnumNameToEnumValueMap["UsecaseRawAutoId"] = sa8650::UsecaseRawAutoId;
g_UsecaseNameToUsecaseInstanceMap["g_pUsecaseRawAuto"] = &(sa8650::Usecases5Target[sa8650::UsecaseRawAutoId]);
g_UsecaseNameToUsecaseInstanceMap["Usecases5Target"] = sa8650::Usecases5Target;
g_EnumNameToEnumValueMap["UsecaseICMSId"] = sa8650::UsecaseICMSId;
g_UsecaseNameToUsecaseInstanceMap["g_pUsecaseICMS"] = &(sa8650::Usecases6Target[sa8650::UsecaseICMSId]);
g_UsecaseNameToUsecaseInstanceMap["Usecases6Target"] = sa8650::Usecases6Target;
g_EnumNameToEnumValueMap["UsecaseFFCChiNodeId"] = sa8650::UsecaseFFCChiNodeId;
g_UsecaseNameToUsecaseInstanceMap["g_pUsecaseFFCChiNode"] = &(sa8650::Usecases6Target[sa8650::UsecaseFFCChiNodeId]);
g_UsecaseNameToUsecaseInstanceMap["Usecases6Target"] = sa8650::Usecases6Target;
g_EnumNameToEnumValueMap["UsecaseFFCCVSOCId"] = sa8650::UsecaseFFCCVSOCId;
g_UsecaseNameToUsecaseInstanceMap["g_pUsecaseFFCCVSOC"] = &(sa8650::Usecases6Target[sa8650::UsecaseFFCCVSOCId]);
g_UsecaseNameToUsecaseInstanceMap["Usecases6Target"] = sa8650::Usecases6Target;
g_EnumNameToEnumValueMap["UsecaseSRVId"] = sa8650::UsecaseSRVId;
g_UsecaseNameToUsecaseInstanceMap["g_pUsecaseSRV"] = &(sa8650::Usecases7Target[sa8650::UsecaseSRVId]);
g_UsecaseNameToUsecaseInstanceMap["Usecases7Target"] = sa8650::Usecases7Target;
g_EnumNameToEnumValueMap["UsecaseFFCId"] = sa8650::UsecaseFFCId;
g_UsecaseNameToUsecaseInstanceMap["g_pUsecaseFFC"] = &(sa8650::Usecases8Target[sa8650::UsecaseFFCId]);
g_UsecaseNameToUsecaseInstanceMap["Usecases8Target"] = sa8650::Usecases8Target;
g_EnumNameToEnumValueMap["UsecaseAutoId"] = sa8650::UsecaseAutoId;
//UsecaseAutoId = 0; this initializes the usecase name pUsecaseName for UsecaseAuto
g_UsecaseNameToUsecaseInstanceMap["g_pUsecaseAuto"] = &(sa8650::Usecases18Target[sa8650::UsecaseAutoId]);
g_UsecaseNameToUsecaseInstanceMap["Usecases18Target"] = sa8650::Usecases18Target;
g_EnumNameToEnumValueMap["ChiMaxNumTargets"] =18;
}
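Once FillMapssa8650() has run, selector code can resolve a usecase descriptor purely by name. A reduced, self-contained illustration of that lookup; ChiUsecase is trimmed to two fields with dummy values here, the real CDK struct is much larger:
#include <cstdio>
#include <map>
#include <string>

struct ChiUsecase
{
    const char* pUsecaseName;
    unsigned    numPipelines;   // dummy field for illustration
};

static std::map<std::string, ChiUsecase*> g_UsecaseNameToUsecaseInstanceMap;

int main()
{
    // What FillMapssa8650() effectively does for the auto usecase
    static ChiUsecase usecaseAuto = { "UsecaseAuto", 18 };
    g_UsecaseNameToUsecaseInstanceMap["g_pUsecaseAuto"] = &usecaseAuto;

    // What DefaultMatchingUsecaseSelection does once a YUV/preview stream matched
    ChiUsecase* pSelected = g_UsecaseNameToUsecaseInstanceMap.at("g_pUsecaseAuto");
    std::printf("selected %s (%u pipelines)\n", pSelected->pUsecaseName, pSelected->numPipelines);
    return 0;
}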
apps/qnx_ap/AMSS/multimedia/qcamera/camera_qcx/cdk_qcx/oem/qcom/chiusecase/auto/chxusecaseselector.cpp
chxusecaseselector.cpp:
Parses pStreamConfig to determine the pipeline configuration, and resolves parameters from g_SocIdNameToFunctionPointerMap
UsecaseAuto::Initialize
1. Initialize metadata Manager and initialize input client
2. Get the default matching usecase for the stream combination
3. Create pipeline and assign all pipeline parameters
Pipeline::Create; m_pPipelines[index]->GetDescriptorMetadata
result = m_pPipelines[index]->CreateDescriptor(), i.e. Pipeline::CreateDescriptor
4. Create session
Session::Create
5. Register metadata clients
pPipeline->SetMetadataClientId(m_metadataClients[index])
6. get extension module instance for pipeline activation
pExtensionModuleInstance->ActivatePipeline
UsecaseSelector::DefaultMatchingUsecase(pStreamConfigPerPipeline, 0); dispatches into the per-platform cdk_qcx/oem/qcom/chiusecase/(platform)/chxusecaseselector.cpp
GetDefaultMatchingUsecase retrieves the usecase info and ultimately calls DefaultMatchingUsecaseSelection
DefaultMatchingUsecaseSelection selects a supported usecase based on the resolution, format, and operation_mode in pStreamConfig
Pipeline::Create then creates the pipeline
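Condensing the six Initialize steps into one control-flow sketch. These are stand-in types, not the real CHI classes, and error handling is trimmed to show the call order only:
// Sketch: call order of UsecaseAuto::Initialize (simplified stand-ins)
#include <cstdio>

struct ChiUsecase {};
struct Pipeline   { bool CreateDescriptor() { return true; } };
struct Session    {};

static ChiUsecase* DefaultMatchingUsecase() { static ChiUsecase u; return &u; }   // step 2
static Pipeline*   PipelineCreate()         { static Pipeline p; return &p; }     // step 3
static Session*    SessionCreate()          { static Session s; return &s; }      // step 4
static void        ActivatePipeline()       { std::puts("pipeline activated"); }  // step 6

int main()
{
    ChiUsecase* pUsecase  = DefaultMatchingUsecase();              // match the stream combination
    Pipeline*   pPipeline = PipelineCreate();                      // Pipeline::Create
    if (pUsecase && pPipeline && pPipeline->CreateDescriptor())    // Pipeline::CreateDescriptor
    {
        Session* pSession = SessionCreate();                       // Session::Create
        // step 5, registering metadata clients, omitted
        if (pSession) ActivatePipeline();                          // pExtensionModuleInstance->ActivatePipeline
    }
    return 0;
}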
// UsecaseSelector::DefaultMatchingUsecase
ChiUsecase* UsecaseSelector::DefaultMatchingUsecase(
camera3_stream_configuration_t* pStreamConfig,
UINT32 bpp)
{
ChiUsecase* pSelectedUsecase = NULL;
OSLIBRARYHANDLE handle = ExtensionModule::GetInstance()->GetChiUsecaseSelectorHandle();
if (NULL == handle)
{
CHX_LOG_ERROR("Failed to load ChiusecaseSelector lib");
}
else
{
CHX_LOG_INFO("ChiusecaseSelector able to load handle lib %p", handle);
}
ChiUsecaseSelector pFuncChiUsecaseSelector =
reinterpret_cast<ChiUsecaseSelector>(ChxUtils::LibGetAddr(handle, "GetDefaultMatchingUsecase"));
if (NULL == pFuncChiUsecaseSelector)
{
CHX_LOG_ERROR("Failed to load pFuncChiUsecaseSelector lib");
}
else
{
pSelectedUsecase = pFuncChiUsecaseSelector(pStreamConfig, bpp);
}
return pSelectedUsecase;
}
UsecaseSelector::DefaultMatchingUsecaseSelection: fetch the usecase array for the matching usecase ID
1. Check the resolution
1) Get the usecase's streamConfigMode
2) From the usecase's pChiUsecases, check the resolution in the ChiTarget BufferDimension of UsecaseAuto_TARGET_BUFFER_RAW1_target in the UsecaseAuto_Targets array, then compare the ChiStreamFormat list UsecaseAuto_TARGET_BUFFER_RAW1_formats against pStreamConfig to decide which format to use (a reduced sketch of this test follows this list)
2. If there are raw streams, enable PruneRawTargetStrings
3. For YUV streams, enable PruneYUVTargetStrings
4. For UBWCTP10 streams, enable PruneUBWCTp10TargetStrings
5. For preview streams, enable PrunePreviewTargetStrings
6. Check whether m_enableAutoNoIPE is configured
7. Check whether scaling/cropping is enabled
8. UsecaseMatches(g_UsecaseNameToUsecaseInstanceMap.at("g_pUsecaseAuto"))
Resolve g_SocIdNameToFunctionPointerMap to obtain the pSelectedUsecase array
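A hypothetical, reduced form of the dimension/format test behind UsecaseMatches / IsMatchingUsecase. The real CDK routine also honors streamConfigMode and the collected prune settings; the type and function names below are illustrative stand-ins:
#include <cstdint>

// Trimmed stand-ins for a ChiTarget's BufferDimension range and format list
struct BufferDimension { uint32_t minW, minH, maxW, maxH; };
struct ChiTargetLite
{
    BufferDimension dim;        // e.g. the UsecaseAuto_TARGET_BUFFER_RAW1_target range
    const int*      pFormats;   // e.g. UsecaseAuto_TARGET_BUFFER_RAW1_formats
    uint32_t        numFormats;
};

// Does one framework stream fit one target? (illustrative logic only)
static bool StreamMatchesTarget(uint32_t w, uint32_t h, int format, const ChiTargetLite& target)
{
    if ((w < target.dim.minW) || (w > target.dim.maxW) ||
        (h < target.dim.minH) || (h > target.dim.maxH))
    {
        return false;   // resolution outside the target's BufferDimension
    }
    for (uint32_t i = 0; i < target.numFormats; i++)
    {
        if (target.pFormats[i] == format)
        {
            return true;    // stream format is in the target's format list
        }
    }
    return false;
}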
// UsecaseSelector::DefaultMatchingUsecaseSelection
extern "C" CAMX_VISIBILITY_PUBLIC ChiUsecase* UsecaseSelector::DefaultMatchingUsecaseSelection(
camera3_stream_configuration_t* pStreamConfig,
UINT32 bpp)
{
ChiUsecase* pSelectedUsecase = NULL;
CDKResult result = CDKResultSuccess;
UINT32 numRawStreams = 0;
UINT32 numYUVStreams = 0;
UINT32 numUBWCStreams = 0;
UINT32 numPreviewStreams = 0;
BOOL isRaw16Stream = FALSE;
BOOL isRawPlain1612Stream = FALSE;
BOOL isNoIPEEnabled = FALSE;
BOOL isP01208Stream = FALSE;
BOOL isP01210Stream = FALSE;
BOOL isP01208LSBStream = FALSE;
BOOL isP01210LSBStream = FALSE;
BOOL isRGBIStream = FALSE;
BOOL isRGBPStream = FALSE;
BOOL isCropEnabled = FALSE;
BOOL isP010Stream = FALSE;
BOOL isP010LSBStream = FALSE;
UINT32 numMetaStreams = 0;
PruneSettings pruneSettings;
PruneVariant variants[MaxPruneTargetStrings]; // update MaxOtherStreamsPerPipeline when adding more pruneVariants
pruneSettings.numSettings = 0;
pruneSettings.pVariants = variants;
if (NULL == pStreamConfig)
{
CHX_LOG_ERROR("Failed to match usecase. pStreamConfig is NULL");
}
else
{
for (UINT32 streamIdx = 0; streamIdx < pStreamConfig->num_streams; streamIdx++)
{
camera3_stream_t* pFwkStream = pStreamConfig->streams[streamIdx];
if (pFwkStream->height > MaxMetaHeight)
{
if (UsecaseSelector::IsRawStream(pFwkStream))
{
numRawStreams++;
}
//pStream->format
if (UsecaseSelector::IsRaw16Stream(pFwkStream))
{
// Raw16 streams are also raw streams but use different pruning,
// so decrement numRawStreams once for each Raw16 stream found.
numRawStreams--;
isRaw16Stream = TRUE;
}
//pStream->stream_type & pStream->format
if (UsecaseSelector::IsYUVOutStream(pFwkStream))
{
numYUVStreams++;
}
if (UsecaseSelector::IsPreviewStream(pFwkStream))
{
numPreviewStreams++;
}
if (UsecaseSelector::IsUBWCTP10Stream(pFwkStream))
{
numUBWCStreams++;
}
if (UsecaseSelector::IsP01208Stream(pFwkStream))
{
isP01208Stream = TRUE;
}
if (UsecaseSelector::IsP01210Stream(pFwkStream))
{
isP01210Stream = TRUE;
}
if (UsecaseSelector::IsP010Stream(pFwkStream))
{
isP010Stream = TRUE;
}
if (UsecaseSelector::IsP01208LSBStream(pFwkStream))
{
isP01208LSBStream = TRUE;
}
if (UsecaseSelector::IsP01210LSBStream(pFwkStream))
{
isP01210LSBStream = TRUE;
}
if (UsecaseSelector::IsP010LSBStream(pFwkStream))
{
isP010LSBStream = TRUE;
}
if (UsecaseSelector::IsRGBIStream(pFwkStream))
{
isRGBIStream = TRUE;
}
if (UsecaseSelector::IsRGBPStream(pFwkStream))
{
isRGBPStream = TRUE;
}
if (UsecaseSelector::IsRawPlain1612Stream(pFwkStream))
{
isRawPlain1612Stream = TRUE;
}
}
else
{
numMetaStreams++;
}
}
auto AddSetting = [&pruneSettings, &variants](const CHAR* pGroup, const CHAR* pType) -> VOID
{
VariantGroup group = GetVariantGroup(pGroup);
VariantType type = GetVariantType(pType);
if ((g_EnumNameToEnumValueMap.at("InvalidVariantGroup") != group) &&
(g_EnumNameToEnumValueMap.at("InvalidVariantType") != type))
{
if (pruneSettings.numSettings < MaxPruneTargetStrings)
{
PruneVariant* pVariant = &variants[pruneSettings.numSettings];
pruneSettings.numSettings++;
pVariant->group = group;
pVariant->type = type;
}
else
{
CHX_LOG_ERROR("prunSettings added exceeds allowed max pruneSettings: %d", MaxPruneTargetStrings);
}
}
else
{
CHX_LOG_WARN("Invalid Prune Setting - Group: %s(%u) Setting: %s(%u)", pGroup, group, pType, type);
}
};
auto UsecaseMatches = [&](const ChiUsecase* const pUsecase) -> BOOL
{ // check the resolution:
// get the usecase's streamConfigMode,
// then from the usecase's pChiUsecases check the resolution in the ChiTarget
// BufferDimension of UsecaseAuto_TARGET_BUFFER_RAW1_target in UsecaseAuto_Targets,
// and compare ChiStreamFormat UsecaseAuto_TARGET_BUFFER_RAW1_formats against pStreamConfig to decide the format
return IsMatchingUsecase(pStreamConfig, pUsecase, &pruneSettings);
};
// Pruning raw streams (the matching YUV/UBWC/preview prune loops were lost in a
// garbled span here; they follow the same AddSetting pattern)
for (UINT32 i = 0; i < numRawStreams; i++)
{
AddSetting(PruneRawTargetStrings[i], "Enabled");
}
isNoIPEEnabled = ExtensionModule::GetInstance()->EnableAutoNoIPEpipeline();
CHX_LOG_INFO("numPreviewStreams %d, numUBWCStreams %d, numYUVStreams %d, numRawStreams %d",
numPreviewStreams, numUBWCStreams, numYUVStreams, numRawStreams);
AddSetting("Raw16", (TRUE == isRaw16Stream) ? "Enabled" : "Disabled");
AddSetting("Plain16_12", (TRUE == isRawPlain1612Stream) ? "Enabled" : "Disabled");
AddSetting("NoIPE", (TRUE == isNoIPEEnabled) ? "Enabled" : "Disabled");
AddSetting("AWB", (TRUE == ExtensionModule::GetInstance()->EnableAutoAWB()) ? "Enabled" : "Disabled");
AddSetting("AEC", (TRUE == ExtensionModule::GetInstance()->EnableAutoAEC()) ? "Enabled" : "Disabled");
AddSetting("P01208Format", (TRUE == isP01208Stream) ? "Enabled" : "Disabled");
AddSetting("P01210Format", (TRUE == isP01210Stream) ? "Enabled" : "Disabled");
AddSetting("P010Format", (TRUE == isP010Stream) ? "Enabled" : "Disabled");
AddSetting("P01208LSBFormat", (TRUE == isP01208LSBStream) ? "Enabled" : "Disabled");
AddSetting("P01210LSBFormat", (TRUE == isP01210LSBStream) ? "Enabled" : "Disabled");
AddSetting("P010LSBFormat", (TRUE == isP010LSBStream) ? "Enabled" : "Disabled");
AddSetting("RGBIFormat", (TRUE == isRGBIStream) ? "Enabled" : "Disabled");
AddSetting("RGBPFormat", (TRUE == isRGBPStream) ? "Enabled" : "Disabled");
// (the remaining prune loops and, presumably, the reads of the enableICMS/enableSRV/enableFFC
// override settings were lost in a garbled span here; the FFC guard below is
// reconstructed from the surrounding braces)
if (1 == enableFFC)
{
if ((1 == ExtensionModule::GetInstance()->getCurrentSoc()) &&
TRUE == UsecaseMatches(g_UsecaseNameToUsecaseInstanceMap.at("g_pUsecaseFFCChiNode")))
{
CHX_LOG_INFO("Selected FFC: ISOC chinode usecase");
pSelectedUsecase = g_UsecaseNameToUsecaseInstanceMap.at("g_pUsecaseFF服务器托管网CChiNode");
}
else if ((2 == ExtensionModule::GetInstance()->getCurrentSoc()) &&
TRUE == UsecaseMatches(g_UsecaseNameToUsecaseInstanceMap.at("g_pUsecaseFFCCVSOC")))
{
CHX_LOG_INFO("Selected FFC: CVSOC usecase");
pSelectedUsecase = g_UsecaseNameToUsecaseInstanceMap.at("g_pUsecaseFFCCVSOC");
}
else
{
// Select ISOC usecase by default
if ( TRUE == UsecaseMatches(g_UsecaseNameToUsecaseInstanceMap.at("g_pUsecaseFFC")))
{
CHX_LOG_INFO("Selected FFC usecase, FFC sensor %d",
ExtensionModule::GetInstance()->IsFFCSensor());
pSelectedUsecase = g_UsecaseNameToUsecaseInstanceMap.at("g_pUsecaseFFC");
}
}
}
if (NULL == pSelectedUsecase)
{
if ((numYUVStreams > 0) || (numPreviewStreams > 0) || (TRUE == isRGBPStream) ||
(TRUE == isP01208Stream) || (TRUE == isP01210Stream) || (TRUE == isRGBIStream) ||
(numUBWCStreams> 0) || (TRUE == isRaw16Stream) || (TRUE == isP010Stream) ||
(TRUE == isP01208LSBStream) || (TRUE == isP01210LSBStream) || (TRUE == isP010LSBStream))
{
if (TRUE == UsecaseMatches(g_UsecaseNameToUsecaseInstanceMap.at("g_pUsecaseAuto")))
{
pSelectedUsecase = g_UsecaseNameToUsecaseInstanceMap.at("g_pUsecaseAuto");
}
}
else
{
if (TRUE == UsecaseMatches(g_UsecaseNameToUsecaseInstanceMap.at("g_pUsecaseRawAuto")))
{
pSelectedUsecase = g_UsecaseNameToUsecaseInstanceMap.at("g_pUsecaseRawAuto");
}
}
}
if (NULL == pSelectedUsecase)
{
CHX_LOG_ERROR("Fatal: no Usecase Selected or Usecase Matching Failed");
}
else
{
// Handle more usecases, currently handling only RAW usecase.
UINT32 totalPipelineIdx = DefaultPipelineIdx;
UINT32 pipelineDescIdx[5] = {0};
ChiUsecase* pClonedUsecase = NULL;
// Select the right pipeline index based on override setting
if ((2 != enableICMS) || (1 != enableSRV))
{
totalPipelineIdx = DefaultPipelineIdx;
for (UINT32 i = 0; i < pSelectedUsecase->numPipelines; i++)
{
if (1 == enableICMS)
{
if (0 == CdkUtils::StrCmp(pSelectedUsecase->pPipelineTargetCreateDesc[i].pPipelineName,
PruneICMSPipelineStrings[numPreviewStreams + numYUVStreams - 1]))
{
pipelineDescIdx[0] = i;
}
}
else if (1 == enableFFC)
{
if (TRUE == ExtensionModule::GetInstance()->IsFFCSensor())
{
CHX_LOG_INFO("Selected FFC usecase, FFC sensor, pipeline %s, i %d",
pSelectedUsecase->pPipelineTargetCreateDesc[i].pPipelineName, i);
if (0 == CdkUtils::StrCmp(pSelectedUsecase->pPipelineTargetCreateDesc[i].pPipelineName, "AutoFFCMeta"))
{
pipelineDescIdx[0] = i;
}
}
else if ((1 == ExtensionModule::GetInstance()->getCurrentSoc()) ||
(2 == ExtensionModule::GetInstance()->getCurrentSoc())) // '||' here: the '&&' in the source can never hold
{
if ((0 == CdkUtils::StrCmp(pSelectedUsecase->pPipelineTargetCreateDesc[i].pPipelineName, "AutoFFCChiNode")) ||
(0 == CdkUtils::StrCmp(pSelectedUsecase->pPipelineTargetCreateDesc[i].pPipelineName, "AutoFFCCVSOC")))
{
pipelineDescIdx[0] = i;
}
}
else
{
if (0 == CdkUtils::StrCmp(pSelectedUsecase->pPipelineTargetCreateDesc[i].pPipelineName, "AutoFFC"))
{
pipelineDescIdx[0] = i;
}
}
}
else if (TRUE == ExtensionModule::GetInstance()->EnableAutoNoIPEpipeline())
{
if ((TRUE == isCropEnabled) &&
(0 == CdkUtils::StrCmp(
pSelectedUsecase->pPipelineTargetCreateDesc[i].pPipelineName , "AutoYUVROI")))
{
pipelineDescIdx[0] = i;
break;
}
if ((0 == CdkUtils::StrCmp(
pSelectedUsecase->pPipelineTargetCreateDesc[i].pPipelineName, "AutoNoIPE")) ||
(0 == CdkUtils::StrCmp(
pSelectedUsecase->pPipelineTargetCreateDesc[i].pPipelineName , "AutoYUV")))
{
pipelineDescIdx[0] = i;
break;
}
}
else if (TRUE == ExtensionModule::GetInstance()->IsOfflineIFEEnabled())
{
if (0 == CdkUtils::StrCmp(
pSelectedUsecase->pPipelineTargetCreateDesc[i].pPipelineName, "AutoOfflineIFE"))
{
pipelineDescIdx[0] = i;
break;
}
}
else
{
if (0 == CdkUtils::StrCmp(pSelectedUsecase->pPipelineTargetCreateDesc[i].pPipelineName, "Auto"))
{
pipelineDescIdx[0] = i;
break;
}
}
}
}
// Prune
pClonedUsecase = UsecaseSelector::CloneUsecase(pSelectedUsecase, totalPipelineIdx, pipelineDescIdx);
result = UsecaseSelector::PruneUsecaseDescriptor(pClonedUsecase,
pruneSettings.numSettings,
pruneSettings.pVariants,
&pSelectedUsecase);
if (NULL != pClonedUsecase)
{
UsecaseSelector::DestroyUsecase(pClonedUsecase);
pClonedUsecase = NULL;
}
if (NULL != pSelectedUsecase)
{
// showing which pipelines are being selected for the usecase
for (UINT i = 0; i < pSelectedUsecase->numPipelines; i++)
{
// (log format string garbled in the source; approximated here)
CHX_LOG_INFO("Usecase: %s pipeline: %s desc: %p",
pSelectedUsecase->pUsecaseName,
pSelectedUsecase->pPipelineTargetCreateDesc[i].pPipelineName,
&pSelectedUsecase->pPipelineTargetCreateDesc[i]);
}
}
else
{
CHX_LOG_ERROR("Failed to match usecase. pSelectedUsecase is NULL");
}
}
}
return pSelectedUsecase;
}
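The clone / prune / destroy sequence near the end of the function deserves a note: the descriptor selected from the generated static tables is never pruned in place. A self-contained sketch of that ownership pattern, with simplified signatures (the real PruneUsecaseDescriptor takes the variant list and returns a CDKResult):
// Sketch: clone -> prune -> destroy ownership pattern (stand-in types)
#include <cstdio>

struct ChiUsecase { int numPipelines; };

static ChiUsecase* CloneUsecase(const ChiUsecase* pSrc) { return new ChiUsecase(*pSrc); }
static void        DestroyUsecase(ChiUsecase* p)        { delete p; }
static int         PruneUsecaseDescriptor(const ChiUsecase* pClone, ChiUsecase** ppOut)
{
    *ppOut = new ChiUsecase(*pClone);   // the real code drops pruned targets/pipelines here
    return 0;
}

int main()
{
    static ChiUsecase tableEntry = { 18 };          // lives in the generated static tables
    ChiUsecase* pSelected = &tableEntry;

    ChiUsecase* pCloned = CloneUsecase(pSelected);  // never prune the table entry itself
    PruneUsecaseDescriptor(pCloned, &pSelected);    // pSelected now points at the pruned copy
    DestroyUsecase(pCloned);                        // the intermediate clone is released

    std::printf("pruned usecase: %d pipelines\n", pSelected->numPipelines);
    DestroyUsecase(pSelected);
    return 0;
}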
Pipeline::Create
1) CHX_NEW Pipeline; then pPipeline->Initialize
2) Set pPipeline->m_pPipelineName = pName;
// Pipeline::Create
Pipeline* Pipeline::Create(
UINT32 cameraId,
PipelineType type,
const CHAR* pName)
{
Pipeline* pPipeline = CHX_NEW Pipeline;
if (NULL != pPipeline)
{
const UINT32 logicalCameraId = ExtensionModule::GetInstance()->GetLogicalCameraId(cameraId);
if (CDKResultSuccess != pPipeline->Initialize(cameraId, logicalCameraId, type))
{
CHX_LOG_ERROR("Failed to initialize %s", pName ? pName : "NULL");
CHX_DELETE pPipeline;
pPipeline = NULL;
}
else
{
pPipeline->m_pPipelineName = pName;
}
}
return pPipeline;
}
// Pipeline::Initialize
CDKResult Pipeline::Initialize(
UINT32 cameraId,
UINT32 logicalCameraId,
PipelineType type)
{
CDKResult result = CDKResultSuccess;
CHX_LOG_INFO("Initializing Pipeline with cameraId %u logicalCameraId %u, type:%d", cameraId, logicalCameraId, type);
m_cameraId = cameraId;
m_logicalCameraId = logicalCameraId;
m_type = type;
m_pipelineActivated = FALSE;
m_isDeferFinalizeNeeded = FALSE;
m_SensorModePickhint = {};
m_isNameAllocated = FALSE;
m_isSensorModeHintSet = FALSE;
m_numInputBuffers = 0;
m_pPipelineDescriptorMetadata = ChiMetadata::Create();
if (NULL == m_pPipelineDescriptorMetadata)
{
result = CDKResultENoMemory;
CHX_LOG_ERROR("Failed to allocate memory for Pipeline Metadata");
}
if (m_type == PipelineType::OfflinePreview)
{
m_numInputBuffers = 1; // Sensor - so no input buffer
m_numOutputBuffers = 1; // Preview
SetupRealtimePreviewPipelineDescriptor();
}
return result;
}
The pipeline is created successfully. The full topology would be BPS -> IPE -> JPEG -> JPEG Aggregator plus links, but in the active (non-#if 0) path only the IPE node and a single link to the sink buffer are set up:
m_pipelineDescriptor.pNodes: nodeId = 65538 (IPE)
m_nodes[nodeIndex].nodeAllPorts.pInputPorts: portId = 0
m_nodes[nodeIndex].nodeAllPorts.pOutputPorts: portId = 8
m_pipelineDescriptor.pLinks:
srcNode.nodeId = 65538
m_links[0].numDestNodes = 1;
m_linkNodeDescriptors[0].nodeId = 2 (sink buffer target)
/// Pipeline::SetupRealtimePreviewPipelineDescriptor
VOID Pipeline::SetupRealtimePreviewPipelineDescriptor()
{
m_pipelineDescriptor.size = sizeof(CHIPIPELINECREATEDESCRIPTOR);
m_pipelineDescriptor.numNodes = 1;
m_pipelineDescriptor.pNodes = &m_nodes[0];
m_pipelineDescriptor.numLinks = 1;
m_pipelineDescriptor.pLinks = &m_links[0];
m_pipelineDescriptor.isRealTime = FALSE;
// Nodes
UINT32 nodeIndex = 0;
#if 0
// ---------------------------------------------------------------------------
// ---------------------------------- BPS ------------------------------------
// ---------------------------------------------------------------------------
m_nodes[nodeIndex].nodeId = 65539;
m_nodes[nodeIndex].nodeInstanceId = 0;
m_nodes[nodeIndex].nodeAllPorts.numInputPorts = 1;
m_nodes[nodeIndex].nodeAllPorts.pInputPorts = &m_inputPorts[BPSNode];
m_nodes[nodeIndex].nodeAllPorts.numOutputPorts = 1;
m_nodes[nodeIndex].nodeAllPorts.pOutputPorts = &m_outputPorts[BPSNode];
// BPS output port
m_outputPorts[BPSNode].portId = 1;
m_outputPorts[BPSNode].isSinkPort = FALSE;
m_outputPorts[BPSNode].isOutputStreamBuffer = FALSE;
// BPS input port
m_inputPorts[BPSNode].portId = 0;
m_inputPorts[BPSNode].isInputStreamBuffer = TRUE;
// ---------------------------------------------------------------------------
// ---------------------------------- IPE ------------------------------------
// ---------------------------------------------------------------------------
nodeIndex++;
#endif
m_nodes[nodeIndex].nodeId = 65538;
m_nodes[nodeIndex].nodeInstanceId = 0;
m_nodes[nodeIndex].nodeAllPorts.numInputPorts = 1;
m_nodes[nodeIndex].nodeAllPorts.pInputPorts = &m_inputPorts[IPENode];
m_nodes[nodeIndex].nodeAllPorts.numOutputPorts = 1;
m_nodes[nodeIndex].nodeAllPorts.pOutputPorts = &m_outputPorts[IPENode];
// IPE output port
m_outputPorts[IPENode].portId = 8;
m_outputPorts[IPENode].isSinkPort = TRUE;
m_outputPorts[IPENode].isOutputStreamBuffer = TRUE;
// IPE input port
m_inputPorts[IPENode].portId = 0;
m_inputPorts[IPENode].isInputStreamBuffer = TRUE;
#if 0
// ---------------------------------------------------------------------------
// ---------------------------------- JPEG -----------------------------------
// ---------------------------------------------------------------------------
nodeIndex++;
m_nodes[nodeIndex].nodeId = 65537;
m_nodes[nodeIndex].nodeInstanceId = 0;
m_nodes[nodeIndex].nodeAllPorts.numInputPorts = 1;
m_nodes[nodeIndex].nodeAllPorts.pInputPorts = &m_inputPorts[JPEGNode];
m_nodes[nodeIndex].nodeAllPorts.numOutputPorts = 1;
m_nodes[nodeIndex].nodeAllPorts.pOutputPorts = &m_outputPorts[JPEGNode];
// JPEG output port
m_outputPorts[JPEGNode].portId = 1;
m_outputPorts[JPEGNode].isSinkPort = FALSE;
m_outputPorts[JPEGNode].isOutputStreamBuffer = FALSE;
// JPEG input port
m_inputPorts[JPEGNode].portId = 0;
m_inputPorts[JPEGNode].isInputStreamBuffer = FALSE;
// ---------------------------------------------------------------------------
// ---------------------------------- JPEG AGRREGATOR ------------------------
// ---------------------------------------------------------------------------
nodeIndex++;
m_nodes[nodeIndex].nodeId = 6;
m_nodes[nodeIndex].nodeInstanceId = 0;
m_nodes[nodeIndex].nodeAllPorts.numInputPorts = 1;
m_nodes[nodeIndex].nodeAllPorts.pInputPorts = &m_inputPorts[JPEGAgrregatorNode];
m_nodes[nodeIndex].nodeAllPorts.numOutputPorts = 1;
m_nodes[nodeIndex].nodeAllPorts.pOutputPorts = &m_outputPorts[JPEGAgrregatorNode];
// JPEG output port
m_outputPorts[JPEGAgrregatorNode].portId = 1;
m_outputPorts[JPEGAgrregatorNode].isSinkPort = TRUE;
m_outputPorts[JPEGAgrregatorNode].isOutputStreamBuffer = TRUE;
// JPEG input port
m_inputPorts[JPEGAgrregatorNode].portId = 0;
m_inputPorts[JPEGAgrregatorNode].isInputStreamBuffer = FALSE;
#endif
// ---------------------------------------------------------------------------
// --------------------------------- Links -----------------------------------
// ---------------------------------------------------------------------------
#if 0
// BPS --> IPE
m_links[0].srcNode.nodeId = 65539;
m_links[0].srcNode.nodeInstanceId = 0;
m_links[0].srcNode.nodePortId = 1;
m_links[0].numDestNodes = 1;
m_links[0].pDestNodes = &m_linkNodeDescriptors[0];
m_linkNodeDescriptors[0].nodeId = 65538;
m_linkNodeDescriptors[0].nodeInstanceId = 0;
m_linkNodeDescriptors[0].nodePortId = 0;
m_links[0].bufferProperties.bufferFlags = BufferMemFlagHw;
m_links[0].bufferProperties.bufferFormat = ChiFormatUBWCTP10;
m_links[0].bufferProperties.bufferHeap = BufferHeapIon;
m_links[0].bufferProperties.bufferQueueDepth = 8;
// IPE --> JPEG
m_links[1].srcNode.nodeId = 65538;
m_links[1].srcNode.nodeInstanceId = 0;
m_links[1].srcNode.nodePortId = 8;
m_links[1].numDestNodes = 1;
m_links[1].pDestNodes = &m_linkNodeDescriptors[1];
m_linkNodeDescriptors[1].nodeId = 65537;
m_linkNodeDescriptors[1].nodeInstanceId = 0;
m_linkNodeDescriptors[1].nodePortId = 0;
m_links[1].bufferProperties.bufferFlags = (BufferMemFlagHw | BufferMemFlagLockable);
m_links[1].bufferProperties.bufferFormat = ChiFormatYUV420NV12;
m_links[1].bufferProperties.bufferHeap = BufferHeapIon;
m_links[1].bufferProperties.bufferQueueDepth = 8;
// JPEG --> JPEG Agrregator
m_links[2].srcNode.nodeId = 65537;
m_links[2].srcNode.nodeInstanceId = 0;
m_links[2].srcNode.nodePortId = 1;
m_links[2].numDestNodes = 1;
m_links[2].pDestNodes = &m_linkNodeDescriptors[2];
m_linkNodeDescriptors[2].nodeId = 6;
m_linkNodeDescriptors[2].nodeInstanceId = 0;
m_linkNodeDescriptors[2].nodePortId = 0;
m_links[2].bufferProperties.bufferFlags = (BufferMemFlagHw | BufferMemFlagLockable);
m_links[2].bufferProperties.bufferFormat = ChiFormatYUV420NV12;
m_links[2].bufferProperties.bufferHeap = BufferHeapIon;
m_links[2].bufferProperties.bufferQueueDepth = 8;
// JPEG Aggregator --> Sink Buffer
m_links[3].srcNode.nodeId = 6;
m_links[3].srcNode.nodeInstanceId = 0;
m_links[3].srcNode.nodePortId = 1;
m_links[3].numDestNodes = 1;
m_links[3].pDestNodes = &m_linkNodeDescriptors[3];
m_linkNodeDescriptors[3].nodeId = 2;
m_linkNodeDescriptors[3].nodeInstanceId = 0;
m_linkNodeDescriptors[3].nodePortId = 0;
#endif
m_links[0].srcNode.nodeId = 65538;
m_links[0].srcNode.nodeInstanceId = 0;
m_links[0].srcNode.nodePortId = 8;
m_links[0].numDestNodes = 1;
m_links[0].pDestNodes = &m_linkNodeDescriptors[0];
m_linkNodeDescriptors[0].nodeId = 2;
m_linkNodeDescriptors[0].nodeInstanceId = 0;
m_linkNodeDescriptors[0].nodePortId = 0;
}
Pipeline::CreateDescriptor
1. Initialize m_pipelineDescriptor and pipelineCreateData
These were obtained earlier, when UsecaseAuto::Initialize parsed UsecaseAuto_pipelines:
pipelineOutputBuffer[streamIdx].pStream = pSinkTargetDesc->pTarget->pChiStream;
pipelineOutputBuffer[streamIdx].pNodePort = pSinkTargetDesc->pNodePort;
pipelineOutputBuffer[streamIdx].numNodePorts = pSinkTargetDesc->numNodePorts;
2. Parse the node info in each ChiNode
1. pNodeProperties[i].pValues stores the algorithms the node supports, e.g. com.qti.stats.pdlibwrapper, com.qti.hvx.addconstant
2. Get the HDR mode info m_HDRInfo[logicalCameraId]
3. For no-buffer usecases (torch widget, AON), IsNoBufferUsecase = TRUE
4. The current scenario uses neither torch widget nor AON; for the auto usecase IsNoBufferUsecase = FALSE
5. Get the frame rate and determine whether HDR mode is supported
6. Get m_cameraCaps.numSensorModes and pSensorModeInfo from pLogicalCameraInfo
modeCount:
pLogicalCameraInfo->m_cameraCaps.numSensorModes
pAllModes:
pLogicalCameraInfo->pSensorModeInfo
7. Determine from pLogicalCameraInfo which HDR mode is supported:
1) IHDR: real-time HDR preview using three in-sensor exposures
2) Staggered HDR (SHDR): line-interleaved HDR
3) MFHDR: multi-frame HDR
4) QHDR (Quad HDR): quad-pixel HDR
8. Determine whether cropping is supported; set the resolution according to the configured sensor mode
// Pipeline::CreateDescriptor
CDKResult Pipeline::CreateDescriptor()
{
CDKResult result = CDKResultSuccess;
PipelineCreateData pipelineCreateData = {};
m_pipelineDescriptor.isRealTime = HasSensorNode(&m_pipelineDescriptor);
// m_cameraId from usecase side must be correct, even for pipelines without sensor Node
m_pipelineDescriptor.cameraId = m_cameraId;
m_pipelineDescriptor.logicalCameraId = m_logicalCameraId;
m_pipelineDescriptor.context = m_context;
pipelineCreateData.pPipelineName = m_pPipelineName;
pipelineCreateData.numOutputs = m_numOutputBuffers;
pipelineCreateData.pOutputDescriptors = &m_pipelineOutputBuffer[0];
pipelineCreateData.numInputs = m_numInputBuffers;
pipelineCreateData.pInputOptions = &m_pipelineInputOptions[0];
pipelineCreateData.pPipelineCreateDescriptor = &m_pipelineDescriptor;
CHIPIPELINECREATEDESCRIPTOR* pCreateDesc = pipelineCreateData.pPipelineCreateDescriptor;
pCreateDesc->numBatchedFrames = ExtensionModule::GetInstance()->GetNumBatchedFrames(m_logicalCameraId);
pCreateDesc->HALOutputBufferCombined = ExtensionModule::GetInstance()->GetHALOutputBufferCombined();
pCreateDesc->maxFPSValue = ExtensionModule::GetInstance()->GetUsecaseMaxFPS(m_logicalCameraId);
const CHAR* pClientName = "Chi::Pipeline::CreateDescriptor";
SetTuningUsecase();
m_pPipelineDescriptorMetadata->AddReference(pClientName);
m_pipelineDescriptor.hPipelineMetadata = m_pPipelineDescriptorMetadata->GetHandle();
CHX_LOG_CORE_CFG("Pipeline[%s] pipeline pointer %p numInputs=%d, numOutputs=%d stream w x h: %d x %d "
"format: %d, numBatchedFrames: %d, HALOutputBufferCombined: %d maxFPSValue: %d cameraId: %d logicalCameraId:%d",
m_pPipelineName,
this,
pipelineCreateData.numInputs,
pipelineCreateData.numOutputs,
(NULL != pipelineCreateData.pOutputDescriptors->pStream) ? pipelineCreateData.pOutputDescriptors->pStream->width : 0,
(NULL != pipelineCreateData.pOutputDescriptors->pStream) ? pipelineCreateData.pOutputDescriptors->pStream->height : 0,
(NULL != pipelineCreateData.pOutputDescriptors->pStream) ? pipelineCreateData.pOutputDescriptors->pStream->format : 0,
pCreateDesc->numBatchedFrames,
pCreateDesc->HALOutputBufferCombined,
pCreateDesc->maxFPSValue,
pipelineCreateData.pPipelineCreateDescriptor->cameraId,
pipelineCreateData.pPipelineCreateDescriptor->logicalCameraId);
UINT32 enableSWMCTFwithReferenceFrame = ExtensionModule::GetInstance()->GetMCTFwithReferenceFrameStatus(m_logicalCameraId);
ChxUtils::SetVendorTagValue(m_pPipelineDescriptorMetadata,
VendorTag::SWMCTFEnableWithRef,
1,
&enableSWMCTFwithReferenceFrame);
UINT32 facialContourVersion = ExtensionModule::GetInstance()->GetFacialContourVersion(m_logicalCameraId);
ChxUtils::SetVendorTagValue(m_pPipelineDescriptorMetadata,
VendorTag::FacialContourVersion,
1,
&facialContourVersion);
// Update stats skip pattern in node property with value from override
//m_pipelineDescriptor.numNodes=3
for (UINT node = 0; node < m_pipelineDescriptor.numNodes; node++)
{
CHINODE* pChiNode = &m_pipelineDescriptor.pNodes[node]; // reconstructed; this line was garbled in the source
//pChiNode->numProperties = 1 / 2 / 4
//pChiNode->pNodeProperties = UsecaseAuto_AutoOfflineIFE_node0_0_properties
// UsecaseAuto_AutoOfflineIFE_node65536_1_properties
// UsecaseAuto_AutoOfflineIFE_node65536_0_properties
for (UINT i = 0; i < pChiNode->numProperties; i++)
{
//pChiNode->pNodeProperties[i].id=1
switch(pChiNode->pNodeProperties[i].id)
{ // parse the node's algorithm properties
//pNodeProperties[i].pValues = com.qti.stats.pdlibwrapper com.qti.hvx.addconstant
case NodePropertyStatsSkipPattern://6
m_statsSkipPattern = ExtensionModule::GetInstance()->GetStatsSkipPattern();
pChiNode->pNodeProperties[i].pValue = &m_statsSkipPattern;
break;
case NodePropertyEnableFOVC://16
m_enableFOVC = ExtensionModule::GetInstance()->EnableFOVCUseCase();
pChiNode->pNodeProperties[i].pValue = &m_enableFOVC;
break;
case NodePropertyNISInternalTrigger://21
m_isNISInternalTrigger = ExtensionModule::GetInstance()->IsInternalTriggered(m_logicalCameraId);
pChiNode->pNodeProperties[i].pValue = &m_isNISInternalTrigger;
break;
default:
break;
}
}
}
//initialize pCreatePipelineDescriptor and m_hPipelineHandle
m_hPipelineHandle = ExtensionModule::GetInstance()->CreatePipelineDescriptor(&pipelineCreateData);
m_pPipelineDescriptorMetadata->ReleaseReference(pClientName);
if (NULL == m_hPipelineHandle)
{
result = CDKResultEFailed;
CHX_LOG_ERROR("Fail due to NULL pipeline handle");
}
else
{ // get the HDR mode info m_HDRInfo[logicalCameraId]
const HDRInfo& rHDRInfo = ExtensionModule::GetInstance()->GetHDRInfo(m_logicalCameraId);
HDRDeviceInfo* pLogicalHDRDeviceInfo = rHDRInfo.pLogicalHDRDeviceInfo;
ChiHDRModeInfo physicalHDRModeInfo =
ExtensionModule::GetInstance()->GetPhysicalDeviceHDRModeInfo(m_logicalCameraId, m_cameraId);
ChiHDRFeatureMode physicalHDRMode = physicalHDRModeInfo.HDRMode;
//usecases(torch widget, AON). IsNoBufferUsecase=true
//auto usecase IsNoBufferUsecase = FALSE
if ((FALSE == ExtensionModule::GetInstance()->IsNoBufferUsecase()))
{
// sensor mode selection not required for no buffer usecases(torch widget, AON).
DesiredSensorMode desiredSensorMode = {};
//get the frame rate
desiredSensorMode.frameRate = ExtensionModule::GetInstance()->GetUsecaseMaxFPS(m_logicalCameraId);
//check whether HDR mode is supported
if (ExtensionModule::GetInstance()->IsVideoHDRMode())
{
const auto sensorModes = [&]() -> ChiPtrView<CHISENSORMODEINFO> // template argument reconstructed
{
UINT32 modeCount = 0;
CHISENSORMODEINFO* pAllModes = NULL;
//get modeCount and pAllModes from pLogicalCameraInfo:
//pLogicalCameraInfo->m_cameraCaps.numSensorModes
//pLogicalCameraInfo->pSensorModeInfo;
if (CDKResultSuccess == ExtensionModule::GetInstance()->GetPhysicalCameraSensorModes(m_cameraId,
&modeCount,
&pAllModes))
{
return ChiPtrView<CHISENSORMODEINFO>{static_cast<SIZE_T>(modeCount), pAllModes};
}
return ChiPtrView<CHISENSORMODEINFO>(static_cast<SIZE_T>(0), NULL);
}();
auto SupportsZZHDR = [&](const ChiSensorModeInfo& rSensorModeInfo)
{
return rSensorModeInfo.sensorModeCaps.u.ZZHDR;
};
desiredSensorMode.sensorModeCaps.u.ZZHDR = std::any_of(sensorModes.begin(), sensorModes.end(), SupportsZZHDR);
} // IHDR: real-time HDR preview using three in-sensor exposures
else if (SelectInSensorHDR3ExpUsecase::InSensorHDR3ExpPreview ==
ExtensionModule::GetInstance()->SelectInSensorHDR3ExpUsecase())
{
desiredSensorMode.sensorModeCaps.u.IHDR = 1;
} // Staggered HDR (SHDR): line-interleaved HDR
else if (HDRFeatureModeSHDR == physicalHDRMode)
{
desiredSensorMode.sensorModeCaps.u.SHDR = 1;
desiredSensorMode.sensorHDRExposureType = ChiHDRExposureType::TwoExposure;
if (FALSE == rHDRInfo.isAutoHDREnabled)
{
switch (rHDRInfo.appReqNumHDRExposure)
{
case SingleHDRExposure:
desiredSensorMode.sensorHDRExposureType = ChiHDRExposureType::OneExposure;
break;
case TwoHDRExposure:
desiredSensorMode.sensorHDRExposureType = ChiHDRExposureType::TwoExposure;
break;
case ThreeHDRExposure:
desiredSensorMode.sensorHDRExposureType = ChiHDRExposureType::ThreeExposure;
break;
default:
desiredSensorMode.sensorHDRExposureType = ChiHDRExposureType::TwoExposure;
break;
}
}
} // MFHDR: multi-frame HDR
else if (HDRFeatureModeMFHDR == physicalHDRMode)
{
desiredSensorMode.sensorModeCaps.u.Normal = TRUE;
// For MFHDR case, we will run sensor @ twice the desired output framerate
desiredSensorMode.frameRate *= 2;
} // QHDR (Quad HDR): quad-pixel HDR
else if (HDRFeatureModeQHDR == physicalHDRMode)
{
if (TRUE == rHDRInfo.appEnabledQHDR)
{
desiredSensorMode.sensorModeCaps.u.QHDR = 1;
desiredSensorMode.sensorHDRExposureType = ChiHDRExposureType::ThreeExposure;
}
}
UINT index = FindHighestWidthInputIndex(m_pipelineInputOptions, m_numInputOptions);
// @todo Select the highest width/height from all the input buffer requirements
desiredSensorMode.optimalWidth = m_pipelineInputOptions[index].bufferOptions.optimalDimension.width;
desiredSensorMode.optimalHeight = m_pipelineInputOptions[index].bufferOptions.optimalDimension.height;
desiredSensorMode.maxWidth = m_pipelineInputOptions[index].bufferOptions.maxDimension.width;
desiredSensorMode.maxHeight = m_pipelineInputOptions[index].bufferOptions.maxDimension.height;
desiredSensorMode.minWidth = m_pipelineInputOptions[index].bufferOptions.minDimension.width;
desiredSensorMode.minHeight = m_pipelineInputOptions[index].bufferOptions.minDimension.height;
desiredSensorMode.forceMode = ExtensionModule::GetInstance()->GetForceSensorMode(m_cameraId);
if (TRUE == m_isSensorModeHintSet)
{
CHX_LOG("input option:%dx%d, upscale:%d, override optimal size:%dx%d, sensor mode caps:%x",
desiredSensorMode.optimalWidth, desiredSensorMode.optimalHeight,
m_SensorModePickhint.postSensorUpscale,
m_SensorModePickhint.sensorOutputSize.width,
m_SensorModePickhint.sensorOutputSize.height,
m_SensorModePickhint.sensorModeCaps.value);
//check whether crop applies: adjust the resolution from the configured sensor-mode pick hint
//(the if-body and the assignment target below were garbled in the source and are
//reconstructed; presumably the desiredSensorMode dimensions are overridden with
//m_SensorModePickhint.sensorOutputSize)
if ((TRUE == m_SensorModePickhint.postSensorUpscale) &&
(m_SensorModePickhint.sensorOutputSize.width < desiredSensorMode.optimalWidth))
{
// ... override desiredSensorMode optimal/max/min dimensions here ...
}
desiredSensorMode.remosaicType = ExtensionModule::GetInstance()->GetRemosaicType();
}
}
if (StreamConfigModeFastShutter == ExtensionModule::GetInstance()->GetOpMode(m_cameraId))
{
desiredSensorMode.sensorModeCaps.u.FS = 1;
}
if (HDRFeatureModeQHDR == physicalHDRMode)
{
// QCFA binning mode
if (FALSE == rHDRInfo.appEnabledQHDR)
{
desiredSensorMode.optimalWidth = m_pipelineInputOptions[index].bufferOptions.optimalDimension.width >> 1;
desiredSensorMode.optimalHeight = m_pipelineInputOptions[index].bufferOptions.optimalDimension.height >> 1;
}
}
m_pSelectedSensorMode = ChxSensorModeSelect::FindBestSensorMode(m_cameraId, &desiredSensorMode);
m_pSelectedSensorMode->batchedFrames = ExtensionModule::GetInstance()->GetNumBatchedFrames(m_logicalCameraId);
m_pSelectedSensorMode->HALOutputBufferCombined = ExtensionModule::GetInstance()->GetHALOutputBufferCombined();
}
if (TRUE == m_pipelineDescriptor.isRealTime)
{
if ((NULL != pLogicalHDRDeviceInfo) &&
(HDRFeatureModeSHDR == pLogicalHDRDeviceInfo->HDRModeInfo.HDRMode) &&
(InvalidMode != ExtensionModule::GetInstance()->GetForceSensorMode(m_cameraId)))
{
// This is to handle the case where the override sensor mode is used
pLogicalHDRDeviceInfo->numHDRExposure = static_cast<UINT32>(m_pSelectedSensorMode->HDRExposureType) + 1;
}
m_pipelineInfo.pipelineInputInfo.isInputSensor = TRUE;
m_pipelineInfo.pipelineInputInfo.sensorInfo.cameraId = m_cameraId;
m_pipelineInfo.pipelineInputInfo.sensorInfo.pSensorModeInfo = m_pSelectedSensorMode;
CHX_LOG_CORE_CFG("Pipeline[%s] Pipeline pointer %p Selected sensor Mode W=%d, H=%d Mode=%d",
m_pPipelineName,
this,
m_pipelineInfo.pipelineInputInfo.sensorInfo.pSensorModeInfo->frameDimension.width,
m_pipelineInfo.pipelineInputInfo.sensorInfo.pSensorModeInfo->frameDimension.height,
m_pipelineInfo.pipelineInputInfo.sensorInfo.pSensorModeInfo->modeIndex);
std::vector transition_modes = {1, 2, 3};
std::copy(transition_modes.begin(), transition_modes.end(), std::back_inserter(m_transitionModesList));
// add changes to get the list of seamless mode transitions possible for this sensor mode
}
else
{
m_pipelineInfo.pipelineInputInfo.isInputSensor = FALSE;
m_pipelineInfo.pipelineInputInfo.inputBufferInfo.numInputBuffers = m_numInputBuffers;
m_pipelineInfo.pipelineInputInfo.inputBufferInfo.pInputBufferDescriptors = GetInputBufferDescriptors();
if ((FALSE == ExtensionModule::GetInstance()->IsNoBufferUsecase()))
{
CHIBUFFERDIMENSION sensorOutDim = {};
sensorOutDim.width = m_pSelectedSensorMode->frameDimension.width;
sensorOutDim.height = m_pSelectedSensorMode->frameDimension.height;
for (UINT32 i = 0; i < m_numInputOptions; i++)
{
CHIBUFFEROPTIONS& rBufferOptions = m_pipelineInputOptions[i].bufferOptions; // reconstructed; garbled in the source
if ((rBufferOptions.minDimension.width > sensorOutDim.width) ||
(rBufferOptions.minDimension.height > sensorOutDim.height))
{
CHX_LOG_INFO("override min requirement to sensor output size. %dx%d -> %dx%d",
rBufferOptions.minDimension.width, rBufferOptions.minDimension.height,
sensorOutDim.width, sensorOutDim.height);
rBufferOptions.minDimension = sensorOutDim;
}
if ((rBufferOptions.minDimension.width > rBufferOptions.optimalDimension.width) ||
(rBufferOptions.minDimension.height > rBufferOptions.optimalDimension.height))
{
rBufferOptions.optimalDimension = rBufferOptions.minDimension;
}
}
}
}
m_pipelineInfo.hPipelineDescriptor = reinterpret_cast<CHIPIPELINEDESCRIPTOR>(m_hPipelineHandle);
m_pipelineInfo.pipelineOutputInfo.hPipelineHandle = NULL;
m_pipelineInfo.pipelineResourcePolicy = m_resourcePolicy;
m_pipelineInfo.isDeferFinalizeNeeded = m_isDeferFinalizeNeeded;
}
return result;
}
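FindHighestWidthInputIndex is called above when filling desiredSensorMode but is never shown. A plausible implementation, assuming it simply returns the input option with the widest optimal dimension; the types are reduced to the fields actually used:
// Sketch: assumed behavior of FindHighestWidthInputIndex (stand-in types)
#include <cstdint>

struct Dimension     { uint32_t width; uint32_t height; };
struct BufferOptions { Dimension optimalDimension; };
struct InputOption   { BufferOptions bufferOptions; };

// Returns the index of the input option with the largest optimal width.
static uint32_t FindHighestWidthInputIndex(const InputOption* pOptions, uint32_t numOptions)
{
    uint32_t index = 0;
    for (uint32_t i = 1; i < numOptions; i++)
    {
        if (pOptions[i].bufferOptions.optimalDimension.width >
            pOptions[index].bufferOptions.optimalDimension.width)
        {
            index = i;
        }
    }
    return index;
}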