public:t-gede-13-1:lab6
Differences
This shows you the differences between two versions of the page.
Both sides previous revisionPrevious revisionNext revision | Previous revision | ||
public:t-gede-13-1:lab6 [2013/03/05 11:37] – [Lab Project] hannes | public:t-gede-13-1:lab6 [2024/04/29 13:33] (current) – external edit 127.0.0.1 | ||
---|---|---|---|
Line 202: | Line 202: | ||
_ogre-> | _ogre-> | ||
</ | </ | ||
- | - **Texture Vertex and Fragment Shaders** | + | - **Texture Vertex and Fragment Shaders**Painting an object in a single color is not particularly interesting. More commonly we read diffuse color information from a texture as we process each fragment. We can do this if we supply each fragment with texture coordinates that are interpolated from texture coordinates stored at the nearest vertices. To do texturing, we should create two shader programs: (1) We should make sure that a **vertex program** provides texture coordinates and (2) we should use the interpolated texture coordinates in a **fragment program** that returns the right color value from a texture. You should now create a new shader program file called '' |
- | - **Animated Vertex Shader** | + | void main_vp( |
- | - **Per Pixel Phong Shader** | + | // Per-vertex information |
+ | float4 vtx_position : POSITION, | ||
+ | float2 vtx_texcoord0 : TEXCOORD0, | ||
+ | // Provided parameters | ||
+ | uniform float4x4 mat_modelproj, | ||
+ | // Shader outputs | ||
+ | out float4 l_position : POSITION, | ||
+ | out float2 l_texcoord0 : TEXCOORD0 | ||
+ | |||
+ | { | ||
+ | // Calculate output position (a vertex shader is expected to at least do this!) | ||
+ | l_position = mul(mat_modelproj, vtx_position); | ||
+ | // Simply copy the input vertex UV to the output | ||
+ | l_texcoord0 = vtx_texcoord0; | ||
+ | } | ||
+ | |||
+ | void main_fp( | ||
+ | // Interpolated fragment values | ||
+ | float2 l_texcoord0 : TEXCOORD0, | ||
+ | // Provided parameters and data | ||
+ | uniform sampler2D texture, | ||
+ | // Shader output | ||
+ | out float4 o_color : COLOR) | ||
+ | { | ||
+ | // Just sample texture using supplied UV | ||
+ | o_color = tex2D(texture, l_texcoord0); | ||
+ | } | ||
+ | </ | ||
+ | vertex_program shader/ | ||
+ | source textureshader.cg | ||
+ | entry_point main_vp | ||
+ | profiles vs_1_1 arbvp1 | ||
+ | |||
+ | default_params { | ||
+ | param_named_auto mat_modelproj worldviewproj_matrix | ||
+ | } | ||
+ | } | ||
+ | |||
+ | fragment_program shader/ | ||
+ | source textureshader.cg | ||
+ | entry_point main_fp | ||
+ | profiles ps_1_1 arbfp1 | ||
+ | } | ||
+ | </ | ||
+ | material shader/ | ||
+ | technique { | ||
+ | pass { | ||
+ | vertex_program_ref shader/ | ||
+ | } | ||
+ | fragment_program_ref shader/ | ||
+ | } | ||
+ | texture_unit { | ||
+ | texture Water02.jpg 2d | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | }</ | ||
+ | - **Animated Vertex Shader**To try to have a vertex shader to something a little more interesting, | ||
+ | void main_time_vp( | ||
+ | // Per-vertex information | ||
+ | float4 vtx_position : POSITION, | ||
+ | float2 vtx_texcoord0 : TEXCOORD0, | ||
+ | // Provided parameters | ||
+ | uniform float4x4 mat_modelproj, | ||
+ | uniform float t, // Expecting time here | ||
+ | // Shader outputs | ||
+ | out float4 l_position : POSITION, | ||
+ | out float2 l_texcoord0 : TEXCOORD0 | ||
+ | |||
+ | { | ||
+ | // Displace the vertical coordinate based on x-location and time | ||
+ | float4 temp = vtx_position; | ||
+ | temp.y = temp.y+cos(temp.x+t); | ||
+ | |||
+ | // Calculate output position | ||
+ | l_position = mul(mat_modelproj, temp); | ||
+ | // Simply copy the input vertex UV to the output | ||
+ | l_texcoord0 = vtx_texcoord0; | ||
+ | } | ||
+ | </ | ||
+ | vertex_program shader/ | ||
+ | source textureshader.cg | ||
+ | entry_point main_time_vp | ||
+ | profiles vs_1_1 arbvp1 | ||
+ | |||
+ | default_params { | ||
+ | param_named_auto mat_modelproj worldviewproj_matrix | ||
+ | param_named_auto t time | ||
+ | } | ||
+ | } | ||
+ | </ | ||
+ | - **Per Pixel Phong Shader**Finally, let's try calculating the color value of a fragment based on an actual lighting model such as the Phong lighting model. Since we will be calculating the lighting value inside each fragment, we call this **per-pixel lighting**. This basically means that instead of using interpolated color values from the nearby vertices, we use interpolated vector values (model space vertex position, normal, view direction and light direction) to calculate the color value inside the fragment program. Create a new shader program file called '' | ||
+ | // Cg | ||
+ | void main_vp( | ||
+ | float4 vtx_position : POSITION, | ||
+ | float3 vtx_normal : NORMAL, | ||
+ | float2 vtx_texcoord0 : TEXCOORD0, | ||
+ | ||| 
+ | uniform float4x4 mat_modelproj, | ||
+ | uniform float4 mspos_light, | ||
+ | uniform float4 mspos_camera, | ||
+ | ||| 
+ | out float4 l_position : POSITION, | ||
+ | out float2 l_texcoord0 : TEXCOORD0, | ||
+ | out float3 l_N : TEXCOORD1, | ||
+ | out float3 l_L : TEXCOORD2, | ||
+ | out float3 l_V : TEXCOORD3, | ||
+ | out float3 l_P : TEXCOORD4 | ||
+ | ) | ||
+ | { | ||
+ | l_position = mul(mat_modelproj, vtx_position); | ||
+ | l_texcoord0 = vtx_texcoord0; | ||
+ | |||
+ | // The principal vectors for our Phong lighting model calculation: | ||
+ | // L = Light Vector, N = Vertex Normal, V = View Vector, R = Light Reflection Vector | ||
+ | l_N = vtx_normal; | ||
+ | // We passed in the light and camera NodePaths and get their model space coordinates | ||
+ | // here through the " | ||
+ | l_L = normalize(mspos_light.xyz - vtx_position.xyz); | ||
+ | l_V = normalize(mspos_camera.xyz - vtx_position.xyz); | ||
+ | l_P = vtx_position.xyz; | ||
+ | // We can't calculate the R vector here because it won't interpolate correctly for each fragment | ||
+ | // (it relies on a dot product which complicates things for it), so we'll calculate it inside the | ||
+ | // fragment shader. The other vectors will all get interpolated and passed to the fragments. | ||
+ | |||
+ | } | ||
+ | |||
+ | void main_fp( | ||
+ | float2 l_texcoord0 : TEXCOORD0, | ||
+ | float3 l_N : TEXCOORD1, | ||
+ | float3 l_L : TEXCOORD2, | ||
+ | float3 l_V : TEXCOORD3, | ||
+ | float3 l_P : TEXCOORD4, | ||
+ | |||
+ | uniform float4 k_ambientc, | ||
+ | uniform float4 k_diffusec, | ||
+ | uniform float4 k_specularc, | ||
+ | |||
+ | out float4 o_color : COLOR) | ||
+ | { | ||
+ | // Inside the fragment shader, we get all the interpolated vectors | ||
+ | // The Diffuse Attenuation follows under what angle the light shines on the fragment | ||
+ | float diffuse_attn = saturate(dot(l_L, l_N)); | ||
+ | |||
+ | // The Specular Attenuation follows how close to the line of light reflection you are looking | ||
+ | float3 R = normalize(2*l_N*dot(l_N, l_L) - l_L); | ||
+ | float specular_attn = pow(saturate(dot(R, | ||
+ | |||
+ | // Here we return the color based on the full phong light model | ||
+ | o_color = 0.2*k_ambientc + diffuse_attn*k_diffusec+specular_attn*k_specularc; | ||
+ | |||
+ | } | ||
+ | </ | ||
+ | vertex_program shader/ | ||
+ | source lightingshader.cg | ||
+ | entry_point main_vp | ||
+ | profiles vs_1_1 arbvp1 | ||
+ | |||
+ | default_params { | ||
+ | param_named_auto mat_modelproj worldviewproj_matrix | ||
+ | param_named_auto mspos_light light_position_object_space 0 | ||
+ | param_named_auto mspos_camera camera_position_object_space | ||
+ | } | ||
+ | } | ||
+ | </ | ||
+ | |||
+ | fragment_program shader/ | ||
+ | source lightingshader.cg | ||
+ | entry_point main_fp | ||
+ | profiles ps_2_0 arbfp1 | ||
+ | |||
+ | default_params { | ||
+ | param_named k_ambientc float4 0.5 0.5 0.5 1.0 | ||
+ | param_named k_diffusec float4 0.8 0.1 0.1 1.0 | ||
+ | param_named k_specularc float4 0.6 0.6 0.6 1.0 | ||
+ | } | ||
+ | } | ||
+ | </ | ||
/var/www/cadia.ru.is/wiki/data/attic/public/t-gede-13-1/lab6.1362483475.txt.gz · Last modified: 2024/04/29 13:32 (external edit)