Searched defs:compileFlags (Results 1 – 10 of 10) sorted by relevance
69 const char* compileFlags = in HlslUsingDXC() local
141 UINT compileFlags = D3DCOMPILE_OPTIMIZATION_LEVEL0 | in HlslUsingFXC() local
27 uint32_t compileFlags = 0; in ComputePipeline() local
294 uint32_t compileFlags = 0; in RenderPipeline() local
35 uint32_t compileFlags = 0; in Initialize() local
189 uint32_t compileFlags; member
395 std::vector<const wchar_t*> GetDXCArguments(uint32_t compileFlags, bool enable16BitTypes) { in GetDXCArguments()
501 std::string CompileFlagsToStringFXC(uint32_t compileFlags) { in CompileFlagsToStringFXC()
751 uint32_t compileFlags) { in Compile()
327 uint32_t compileFlags = 0; in Initialize() local
96 uint32_t compileFlags = 0; in GrCompileHLSLShader() local
680 uint32_t compileFlags = 0; in MakeComputePipeline() local
261 const EShMessages compileFlags = getCompileFlags(buildOptions, shaderLanguage); in compileShaderToSpirV() local
946 int compileFlags = stream->readInt<int>(); in load() local
971 int compileFlags = stream->readInt<int>(); in load() local
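
The HlslUsingFXC() hit above passes a UINT bitmask of D3DCOMPILE_* flags to the legacy FXC compiler. Below is a minimal sketch of that pattern, not code from any of the indexed files: the source string, entry point ("main"), and target profile ("vs_5_0") are placeholder assumptions; the D3DCompile signature and the D3DCOMPILE_* constants are the real d3dcompiler API.

    // Sketch only: handing a compileFlags bitmask to FXC via D3DCompile.
    #include <windows.h>
    #include <d3dcompiler.h>  // link against d3dcompiler.lib
    #include <wrl/client.h>

    using Microsoft::WRL::ComPtr;

    ComPtr<ID3DBlob> CompileWithFXC(const char* src, size_t srcLen) {
        // Debug-friendly flags, matching the D3DCOMPILE_OPTIMIZATION_LEVEL0
        // usage visible in the search results above.
        UINT compileFlags = D3DCOMPILE_OPTIMIZATION_LEVEL0 | D3DCOMPILE_DEBUG;

        ComPtr<ID3DBlob> shader;
        ComPtr<ID3DBlob> errors;
        HRESULT hr = D3DCompile(src, srcLen,
                                /*pSourceName=*/nullptr,
                                /*pDefines=*/nullptr,
                                /*pInclude=*/nullptr,
                                "main",    // entry point (assumed)
                                "vs_5_0",  // target profile (assumed)
                                compileFlags,
                                /*Flags2=*/0,
                                &shader, &errors);
        if (FAILED(hr)) {
            // On failure FXC reports diagnostics through the error blob.
            if (errors) {
                OutputDebugStringA(
                    static_cast<const char*>(errors->GetBufferPointer()));
            }
            return nullptr;
        }
        return shader;
    }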
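The GetDXCArguments(uint32_t compileFlags, bool enable16BitTypes) declaration above suggests the same kind of bitmask is translated into DXC command-line arguments. The sketch below shows one plausible shape for such a helper; the kCompileFlag* bit constants are invented for illustration, while "-Zi", "-Od", and "-enable-16bit-types" are real DXC options.

    // Sketch only: mapping a hypothetical compileFlags bitmask to DXC args.
    #include <cstdint>
    #include <vector>

    constexpr uint32_t kCompileFlagDebug        = 1u << 0;  // hypothetical bit
    constexpr uint32_t kCompileFlagSkipOptimize = 1u << 1;  // hypothetical bit

    std::vector<const wchar_t*> GetDXCArguments(uint32_t compileFlags,
                                                bool enable16BitTypes) {
        std::vector<const wchar_t*> args;
        if (compileFlags & kCompileFlagDebug)        args.push_back(L"-Zi");
        if (compileFlags & kCompileFlagSkipOptimize) args.push_back(L"-Od");
        if (enable16BitTypes) {
            // 16-bit types require shader model 6.2+ when invoking dxc.
            args.push_back(L"-enable-16bit-types");
        }
        return args;
    }

In a real pipeline, strings assembled this way would be forwarded to IDxcCompiler::Compile along with the shader source blob.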