I get a GL_INVALID_ENUM error from glGetError() right after glewInit().
By catching (and clearing) that error, the previous problem with
m_sampler = context->createTextureSamplerFromGLImage( texId, RT_TARGET_GL_TEXTURE_2D );
is resolved. But OptiX still has an issue with this code. When I run without setting
context["tex"]->setTextureSampler( m_sampler );
everything is fine. When I do run this line, I get a very distorted image that never updates.
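For context, this is roughly how I configure the sampler on the host side. This is only a sketch of what I believe is needed, not verified code; the particular wrap/filter/indexing/read modes are my assumptions:
// Host-side sampler setup (sketch; the chosen mode values are assumptions)
m_sampler = context->createTextureSamplerFromGLImage( texId, RT_TARGET_GL_TEXTURE_2D );
m_sampler->setWrapMode( 0, RT_WRAP_REPEAT );
m_sampler->setWrapMode( 1, RT_WRAP_REPEAT );
m_sampler->setIndexingMode( RT_TEXTURE_INDEX_NORMALIZED_COORDINATES ); // tex2D with 0..1 uv coordinates
m_sampler->setReadMode( RT_TEXTURE_READ_NORMALIZED_FLOAT );            // 8-bit GL texture read as float4 in the kernel
m_sampler->setMaxAnisotropy( 1.0f );
m_sampler->setFilteringModes( RT_FILTER_LINEAR, RT_FILTER_LINEAR, RT_FILTER_NONE );
context["tex"]->setTextureSampler( m_sampler );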
I guess I’m just misusing the sampler in OptiX. My .cu shader kernel looks like this:
/*
* Copyright (c) 2008 - 2009 NVIDIA Corporation. All rights reserved.
*
* NVIDIA Corporation and its licensors retain all intellectual property and proprietary
* rights in and to this software, related documentation and any modifications thereto.
* Any use, reproduction, disclosure or distribution of this software and related
* documentation without an express license agreement from NVIDIA Corporation is strictly
* prohibited.
*
* TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, THIS SOFTWARE IS PROVIDED *AS IS*
* AND NVIDIA AND ITS SUPPLIERS DISCLAIM ALL WARRANTIES, EITHER EXPRESS OR IMPLIED,
* INCLUDING, BUT NOT LIMITED TO, IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE. IN NO EVENT SHALL NVIDIA OR ITS SUPPLIERS BE LIABLE FOR ANY
* SPECIAL, INCIDENTAL, INDIRECT, OR CONSEQUENTIAL DAMAGES WHATSOEVER (INCLUDING, WITHOUT
* LIMITATION, DAMAGES FOR LOSS OF BUSINESS PROFITS, BUSINESS INTERRUPTION, LOSS OF
* BUSINESS INFORMATION, OR ANY OTHER PECUNIARY LOSS) ARISING OUT OF THE USE OF OR
* INABILITY TO USE THIS SOFTWARE, EVEN IF NVIDIA HAS BEEN ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGES
*/
#include <optix.h>
#include <optixu/optixu_math_namespace.h>
using namespace optix;
rtDeclareVariable(float3, shading_normal, attribute shading_normal, );
rtDeclareVariable(float3, uv_coordinate, attribute uv_coordinate, );
struct PerRayData_radiance
{
  float3 result;
  float importance;
  int depth;
};
struct PerRayData_shadow
{
  float3 attenuation;
};
// Texture
rtTextureSampler<float4, 2> tex;
rtDeclareVariable(PerRayData_radiance, prd_radiance, rtPayload, );
rtDeclareVariable(PerRayData_shadow, prd_shadow, rtPayload, );
rtDeclareVariable(optix::Ray, ray, rtCurrentRay, );
rtDeclareVariable(float, scene_epsilon, , );
rtDeclareVariable(float, t_hit, rtIntersectionDistance, );
rtDeclareVariable(rtObject, top_object, , );
RT_PROGRAM void any_hit_shadow()
{
  // this material is opaque, so it fully attenuates all shadow rays
  prd_shadow.attenuation = make_float3(0);
  rtTerminateRay();
}
RT_PROGRAM void closest_hit_radiance()
{
  float3 color = normalize(rtTransformNormal(RT_OBJECT_TO_WORLD, shading_normal))*0.5f + 0.5f;
  //float3 color = make_float3( tex2D( tex, uv_coordinate.x, uv_coordinate.y ) );
  float reflectivity = 0.3f;
  float3 ffnormal = faceforward( shading_normal, -ray.direction, shading_normal );
  float3 hit_point = ray.origin + t_hit * ray.direction;
  //float importance = prd_radiance.importance * optix::luminance( reflectivity );
  float importance = 1.0f;
  float importance_cutoff = 0.1f;
  int max_depth = 2;
  unsigned int radiance_ray_type = 0u;
  // reflection ray
  if( importance > importance_cutoff && prd_radiance.depth < max_depth )
  {
    PerRayData_radiance refl_prd;
    refl_prd.importance = importance;
    refl_prd.depth = prd_radiance.depth + 1;
    float3 R = reflect( ray.direction, ffnormal );
    optix::Ray refl_ray( hit_point, R, radiance_ray_type, scene_epsilon );
    rtTrace( top_object, refl_ray, refl_prd );
    color += reflectivity * refl_prd.result;
  }
  prd_radiance.result = color;
}
Even without actually sampling the texture in the .cu (the tex2D call is commented out), the program still fails…
I can’t seem to fix the GL error, but it doesn’t seem to matter anyway: http://stackoverflow.com/questions/10857335/opengl-glgeterror-returns-invalid-enum-after-call-to-glewinit
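For now I just drain the stale error right after glewInit() so that later glGetError() checks aren't polluted by it. A minimal sketch, assuming <GL/glew.h> is included and a GL context is current:
glewExperimental = GL_TRUE;        // so GLEW loads core-profile entry points
GLenum glewStatus = glewInit();
if( glewStatus != GLEW_OK )
  return;                          // handle the failure appropriately
// glewInit() calls glGetString(GL_EXTENSIONS), which is invalid in a core
// profile and leaves GL_INVALID_ENUM in the error queue; clear it here.
while( glGetError() != GL_NO_ERROR )
  ;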
Do I have to do anything else in OptiX to use a texture sampler? I set it in C++ and declare it in my .cu shader.