Searched defs:compileFlags (Results 1 – 8 of 8) sorted by relevance
69   const char* compileFlags =  in HlslUsingDXC()  local
141  UINT compileFlags = D3DCOMPILE_OPTIMIZATION_LEVEL0 |  in HlslUsingFXC()  local
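(Aside, not part of the indexed sources: the HlslUsingFXC() match above follows the usual FXC pattern of OR-ing D3DCOMPILE_* bits into a UINT and passing it to D3DCompile. A minimal sketch of that pattern, assuming a Windows build with d3dcompiler available; the inline shader source, entry point, and target profile are illustrative assumptions.)

    // Sketch only: assemble a compileFlags bitmask and hand it to FXC via D3DCompile.
    #include <d3dcompiler.h>
    #include <wrl/client.h>
    #include <cstdio>
    #include <cstring>

    #pragma comment(lib, "d3dcompiler.lib")

    int main() {
        // Assumed inline pixel shader for illustration.
        const char* hlsl = "float4 main() : SV_Target { return float4(1, 0, 0, 1); }";

        // Debug-friendly flags, matching the D3DCOMPILE_OPTIMIZATION_LEVEL0 usage above.
        UINT compileFlags = D3DCOMPILE_OPTIMIZATION_LEVEL0 | D3DCOMPILE_DEBUG;

        Microsoft::WRL::ComPtr<ID3DBlob> code, errors;
        HRESULT hr = D3DCompile(hlsl, std::strlen(hlsl), "inline.hlsl", nullptr, nullptr,
                                "main", "ps_5_0", compileFlags, 0, &code, &errors);
        if (FAILED(hr) && errors) {
            std::printf("%s\n", static_cast<const char*>(errors->GetBufferPointer()));
        }
        return FAILED(hr) ? 1 : 0;
    }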
35 uint32_t compileFlags = 0; in Initialize() local
189  uint32_t compileFlags;  member
395  std::vector<const wchar_t*> GetDXCArguments(uint32_t compileFlags, bool enable16BitTypes) {  in GetDXCArguments()
501  std::string CompileFlagsToStringFXC(uint32_t compileFlags) {  in CompileFlagsToStringFXC()
751  uint32_t compileFlags) {  in Compile()
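(Aside: GetDXCArguments() above suggests the DXC path expands the same compileFlags bitmask into command-line switches. A hedged sketch of one plausible shape for such a helper; the kDebugInfo/kSkipOptimization bit names are assumptions, while -Zi, -Od, and -enable-16bit-types are real DXC switches.)

    // Sketch only: expand a compileFlags bitmask into DXC argument strings.
    #include <cstdint>
    #include <vector>

    constexpr uint32_t kDebugInfo        = 1u << 0;  // assumed bit: maps to -Zi
    constexpr uint32_t kSkipOptimization = 1u << 1;  // assumed bit: maps to -Od

    std::vector<const wchar_t*> GetDXCArguments(uint32_t compileFlags, bool enable16BitTypes) {
        std::vector<const wchar_t*> args;
        if (compileFlags & kDebugInfo) {
            args.push_back(L"-Zi");                  // emit debug information
        }
        if (compileFlags & kSkipOptimization) {
            args.push_back(L"-Od");                  // disable optimizations
        }
        if (enable16BitTypes) {
            args.push_back(L"-enable-16bit-types");  // needs shader model 6.2+
        }
        return args;
    }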
327 uint32_t compileFlags = 0; in Initialize() local
89   uint32_t compileFlags = 0;  in GrCompileHLSLShader()  local
649  uint32_t compileFlags = 0;  in MakeComputePipeline()  local
96   uint32_t compileFlags = 0;  in GrCompileHLSLShader()  local
680  uint32_t compileFlags = 0;  in MakeComputePipeline()  local
270 const EShMessages compileFlags = getCompileFlags(buildOptions, shaderLanguage); in compileShaderToSpirV() local
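(Aside: the compileShaderToSpirV() match uses glslang, where compileFlags is an EShMessages bitmask returned by a getCompileFlags() helper. A hedged sketch of such a helper; the BuildOptions struct is an assumption standing in for the real buildOptions type, while the EShMsg* enumerators are real glslang values from ShaderLang.h.)

    // Sketch only: build an EShMessages bitmask for glslang from caller options.
    #include <glslang/Public/ShaderLang.h>

    struct BuildOptions {            // assumption: placeholder for the real options type
        bool targetVulkan = true;
        bool sourceIsHlsl = false;
    };

    EShMessages getCompileFlags(const BuildOptions& buildOptions) {
        unsigned flags = EShMsgSpvRules;     // validate against SPIR-V generation rules
        if (buildOptions.targetVulkan) {
            flags |= EShMsgVulkanRules;      // enforce Vulkan-specific rules
        }
        if (buildOptions.sourceIsHlsl) {
            flags |= EShMsgReadHlsl;         // treat the source as HLSL
        }
        return static_cast<EShMessages>(flags);
    }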
971 int compileFlags = stream->readInt<int>(); in load() local