Searched refs:enable16BitTypes (Results 1 – 4 of 4) sorted by relevance
112 void enable16BitTypes (bool enabled) { m_16BitTypesEnabled = enabled; } in enable16BitTypes() function in vkt::MemoryModel::ShaderInterface
116 m_interface.enable16BitTypes(features & FEATURE_16BIT_TYPES); in RandomSharedLayoutCase()
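The two VK-GL-CTS hits above show a setter driven by a feature bitmask. Below is a minimal standalone sketch of that pattern; the class, the FEATURE_16BIT_TYPES constant, and their values are illustrative stand-ins, not the actual CTS code.

    #include <cstdint>
    #include <iostream>

    // Illustrative stand-ins for the test-case feature bits (values are assumptions).
    enum FeatureBits : std::uint32_t
    {
        FEATURE_VECTORS     = 1u << 0,
        FEATURE_16BIT_TYPES = 1u << 1,
    };

    class ShaderInterfaceSketch
    {
    public:
        // Mirrors the setter in the search hit: the flag records whether
        // 16-bit types may be used when generating shared-memory layouts.
        void enable16BitTypes (bool enabled) { m_16BitTypesEnabled = enabled; }
        bool uses16BitTypes (void) const     { return m_16BitTypesEnabled; }

    private:
        bool m_16BitTypesEnabled = false;
    };

    int main ()
    {
        const std::uint32_t features = FEATURE_16BIT_TYPES;

        ShaderInterfaceSketch iface;
        // As in RandomSharedLayoutCase(): the bitmask test converts to bool,
        // so any non-zero result enables the feature.
        iface.enable16BitTypes((features & FEATURE_16BIT_TYPES) != 0);

        std::cout << "16-bit types enabled: " << iface.uses16BitTypes() << "\n";
        return 0;
    }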
1429 bool enable16BitTypes = parseContext.hlslEnable16BitTypes(); in acceptType() local
1431 const TBasicType min16float_bt = enable16BitTypes ? EbtFloat16 : EbtFloat; in acceptType()
1432 const TBasicType min10float_bt = enable16BitTypes ? EbtFloat16 : EbtFloat; in acceptType()
1433 const TBasicType half_bt = enable16BitTypes ? EbtFloat16 : EbtFloat; in acceptType()
1434 const TBasicType min16int_bt = enable16BitTypes ? EbtInt16 : EbtInt; in acceptType()
1435 const TBasicType min12int_bt = enable16BitTypes ? EbtInt16 : EbtInt; in acceptType()
1436 const TBasicType min16uint_bt = enable16BitTypes ? EbtUint16 : EbtUint; in acceptType()
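The glslang hits show HLSL's half and min-precision keywords collapsing to either true 16-bit or 32-bit basic types, depending on whether 16-bit types are enabled (hlslEnable16BitTypes()). A self-contained sketch of that mapping follows; the enum and function names are local stand-ins, not glslang's TBasicType or its HLSL grammar code.

    #include <cassert>

    // Local stand-in for the handful of glslang basic types involved here.
    enum BasicType { Float, Float16, Int, Int16, Uint, Uint16 };

    struct HlslScalarMapping
    {
        BasicType min16float_bt;
        BasicType min10float_bt;
        BasicType half_bt;
        BasicType min16int_bt;
        BasicType min12int_bt;
        BasicType min16uint_bt;
    };

    // Mirrors the ternaries in acceptType(): with 16-bit types enabled the
    // min-precision/half keywords become real 16-bit types, otherwise they
    // fall back to the 32-bit equivalents.
    HlslScalarMapping mapHlslScalarTypes (bool enable16BitTypes)
    {
        HlslScalarMapping m;
        m.min16float_bt = enable16BitTypes ? Float16 : Float;
        m.min10float_bt = enable16BitTypes ? Float16 : Float;
        m.half_bt       = enable16BitTypes ? Float16 : Float;
        m.min16int_bt   = enable16BitTypes ? Int16   : Int;
        m.min12int_bt   = enable16BitTypes ? Int16   : Int;
        m.min16uint_bt  = enable16BitTypes ? Uint16  : Uint;
        return m;
    }

    int main ()
    {
        assert(mapHlslScalarTypes(true).half_bt  == Float16);
        assert(mapHlslScalarTypes(false).half_bt == Float);
        return 0;
    }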
395 std::vector<const wchar_t*> GetDXCArguments(uint32_t compileFlags, bool enable16BitTypes) { in GetDXCArguments() argument
436 if (enable16BitTypes) { in GetDXCArguments()
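The last hit builds a DXC command-line argument list and adds a switch only when 16-bit types are requested. Below is a simplified sketch of that shape, not the real implementation: -enable-16bit-types and -Zi are real DXC options, while the kCompileFlagDebug constant and the function name are assumptions for illustration.

    #include <cstdint>
    #include <vector>

    // Illustrative compile-flag bit; the real flag values are not shown in the hit.
    constexpr std::uint32_t kCompileFlagDebug = 1u << 0;

    // Returns DXC arguments as wide-string literals; literals have static
    // storage duration, so the pointers remain valid after returning.
    std::vector<const wchar_t*> GetDXCArgumentsSketch (std::uint32_t compileFlags,
                                                       bool enable16BitTypes)
    {
        std::vector<const wchar_t*> args;

        if (compileFlags & kCompileFlagDebug)
            args.push_back(L"-Zi");  // embed debug information

        if (enable16BitTypes)
        {
            // -enable-16bit-types typically requires HLSL 2018 and a
            // shader model 6.2+ target profile.
            args.push_back(L"-enable-16bit-types");
        }

        return args;
    }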