diff --git a/source/blender/gpu/intern/gpu_extensions.c b/source/blender/gpu/intern/gpu_extensions.c index fd671446768..08f4206e4ca 100644 --- a/source/blender/gpu/intern/gpu_extensions.c +++ b/source/blender/gpu/intern/gpu_extensions.c @@ -1128,6 +1128,19 @@ static void shader_print_errors(const char *task, char *log, const char *code) fprintf(stderr, "%s\n", log); } +static const char *gpu_shader_standard_defines(void) +{ + /* some useful defines to detect GPU type */ + if(GPU_type_matches(GPU_DEVICE_ATI, GPU_OS_ANY, GPU_DRIVER_ANY)) + return "#define GPU_ATI\n"; + else if(GPU_type_matches(GPU_DEVICE_NVIDIA, GPU_OS_ANY, GPU_DRIVER_ANY)) + return "#define GPU_NVIDIA\n"; + else if(GPU_type_matches(GPU_DEVICE_INTEL, GPU_OS_ANY, GPU_DRIVER_ANY)) + return "#define GPU_INTEL\n"; + + return ""; +} + GPUShader *GPU_shader_create(const char *vertexcode, const char *fragcode, const char *libcode, const char *defines) { GLint status; @@ -1156,9 +1169,11 @@ GPUShader *GPU_shader_create(const char *vertexcode, const char *fragcode, const } if (vertexcode) { - const char *source[2]; + const char *source[3]; int num_source = 0; + source[num_source++] = gpu_shader_standard_defines(); + if (defines) source[num_source++] = defines; if (vertexcode) source[num_source++] = vertexcode; @@ -1178,9 +1193,11 @@ GPUShader *GPU_shader_create(const char *vertexcode, const char *fragcode, const } if (fragcode) { - const char *source[3]; + const char *source[4]; int num_source = 0; + source[num_source++] = gpu_shader_standard_defines(); + if (defines) source[num_source++] = defines; if (libcode) source[num_source++] = libcode; if (fragcode) source[num_source++] = fragcode; diff --git a/source/blender/gpu/shaders/gpu_shader_simple_vert.glsl b/source/blender/gpu/shaders/gpu_shader_simple_vert.glsl index 612f9cff6aa..9491eaa672d 100644 --- a/source/blender/gpu/shaders/gpu_shader_simple_vert.glsl +++ b/source/blender/gpu/shaders/gpu_shader_simple_vert.glsl @@ -29,11 +29,9 @@ void main() gl_Position = 
gl_ProjectionMatrix * co; -#ifdef __GLSL_CG_DATA_TYPES - // Setting gl_ClipVertex is necessary to get glClipPlane working on NVIDIA graphic cards. - // gl_ClipVertex works only on NVIDIA graphic cards so we have to check with - // __GLSL_CG_DATA_TYPES if a NVIDIA graphic card is used (Cg support). - // gl_ClipVerte is supported up to GLSL 1.20. +#ifdef GPU_NVIDIA + // Setting gl_ClipVertex is necessary to get glClipPlane working on NVIDIA + // graphic cards, while on ATI it can cause a software fallback. gl_ClipVertex = gl_ModelViewMatrix * gl_Vertex; #endif diff --git a/source/blender/gpu/shaders/gpu_shader_vertex.glsl b/source/blender/gpu/shaders/gpu_shader_vertex.glsl index 9e0db44ed31..8741a13ea9b 100644 --- a/source/blender/gpu/shaders/gpu_shader_vertex.glsl +++ b/source/blender/gpu/shaders/gpu_shader_vertex.glsl @@ -10,11 +10,9 @@ void main() varnormal = normalize(gl_NormalMatrix * gl_Normal); gl_Position = gl_ProjectionMatrix * co; - // Setting gl_ClipVertex is necessary to get glClipPlane working on NVIDIA graphic cards. - // gl_ClipVertex works only on NVIDIA graphic cards so we have to check with - // __GLSL_CG_DATA_TYPES if a NVIDIA graphic card is used (Cg support). - // gl_ClipVerte is supported up to GLSL 1.20. -#ifdef __GLSL_CG_DATA_TYPES +#ifdef GPU_NVIDIA + // Setting gl_ClipVertex is necessary to get glClipPlane working on NVIDIA + // graphic cards, while on ATI it can cause a software fallback. gl_ClipVertex = gl_ModelViewMatrix * gl_Vertex; #endif