R600: Initialize AMDGPUMachineFunction::ShaderType to ShaderType::COMPUTE

We need to initialize this to something, and since clang does not set
the shader type attribute and is used only for compute shaders,
initializing it to COMPUTE seems like the best choice.

Reviewed-by: Christian König <christian.koenig@amd.com>
llvm-svn: 180620
commit 456adc6c4e
parent a8aa97d310
Author: Tom Stellard
Date:   2013-04-26 18:32:24 +00:00

2 files changed, 5 insertions(+), 1 deletion(-)


@@ -1,4 +1,5 @@
 #include "AMDGPUMachineFunction.h"
+#include "AMDGPU.h"
 #include "llvm/IR/Attributes.h"
 #include "llvm/IR/Function.h"
@@ -8,6 +9,7 @@ const char *AMDGPUMachineFunction::ShaderTypeAttribute = "ShaderType";
 AMDGPUMachineFunction::AMDGPUMachineFunction(const MachineFunction &MF) :
   MachineFunctionInfo() {
+  ShaderType = ShaderType::COMPUTE;
   AttributeSet Set = MF.getFunction()->getAttributes();
   Attribute A = Set.getAttribute(AttributeSet::FunctionIndex,
                                  ShaderTypeAttribute);


@@ -8,7 +8,7 @@
 ; CONFIG-CHECK: .section .AMDGPU.config
 ; CONFIG-CHECK-NEXT: .long 45096
 ; CONFIG-CHECK-NEXT: .long 0
-define void @test(i32 %p) {
+define void @test(i32 %p) #0 {
 %i = add i32 %p, 2
 %r = bitcast i32 %i to float
 call void @llvm.SI.export(i32 15, i32 0, i32 1, i32 12, i32 0, float %r, float %r, float %r, float %r)
@@ -16,3 +16,5 @@ define void @test(i32 %p) {
 }

 declare void @llvm.SI.export(i32, i32, i32, i32, i32, float, float, float, float)
+
+attributes #0 = { "ShaderType"="0" } ; Pixel Shader
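
Note: the constructor change above implements a simple default-then-override
pattern: every function starts out as a compute shader, and only an explicit
"ShaderType" string attribute (as in the test, where "0" marks a pixel shader)
changes that. The following is a minimal, dependency-free C++ sketch of that
logic, not the backend's actual code; resolveShaderType and the enum values
are illustrative assumptions rather than what AMDGPU.h defines.

#include <cstdio>
#include <optional>
#include <string>

// Assumed shader type encoding for illustration; "0" is a pixel
// shader per the test above, and COMPUTE is the fallback.
enum ShaderType { PIXEL = 0, VERTEX = 1, GEOMETRY = 2, COMPUTE = 3 };

// Default-then-override: start from COMPUTE (what clang-compiled
// kernels get, since clang emits no "ShaderType" attribute) and only
// override when the attribute is present. `Attr` stands in for the
// result of the attribute lookup the constructor performs.
unsigned resolveShaderType(const std::optional<std::string> &Attr) {
  unsigned Type = COMPUTE;     // the default this commit adds
  if (Attr)
    Type = std::stoul(*Attr);  // e.g. "0" -> PIXEL for the test above
  return Type;
}

int main() {
  // No attribute (a clang compute kernel) falls back to COMPUTE (3).
  std::printf("no attribute -> %u\n", resolveShaderType(std::nullopt));
  // "ShaderType"="0" (the test's pixel shader) overrides the default.
  std::printf("\"ShaderType\"=\"0\" -> %u\n",
              resolveShaderType(std::string("0")));
  return 0;
}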