On 31/07/2016 at 13:45, xxxxxxxx wrote:
User Information:
Cinema 4D Version: R13
Platform: Windows ;
Language(s) : C++ ;
---------
Hi,
I'm trying to convert barycentric coords to UV coords. So that I can then use those UV coords to sample a shader in a material's color channel. But I keep hitting a dead end.
There's tons of tutorials for this on the internet. But when it comes time to actually convert them to UV coords they either don't show it. Or the code they use doesn't work for me using the C4D SDK.
Here is my DescriptionToolData plugin code that uses a GeRayCollider ray and gets the barycentric coords from the polygon when I LMB click on it.
How can I properly convert them into UV coords (0, 1) with (0, 0) at the top left corner?
TOOLDRAW MyTool::Draw(BaseDocument* doc, BaseContainer& data, BaseDraw* bd, BaseDrawHelp* bh, BaseThread* bt, TOOLDRAWFLAGS flags)
{
BaseObject *obj = doc->GetActiveObject();
if(!obj) return TOOLDRAW_0;
AutoAlloc<GeRayCollider> rc; //Create an instance of the ray collider class
if (!rc) return TOOLDRAW_0;
rc->Init(obj, TRUE); //Initialize it to work with the active object
Vector wtail = bd->SW(Vector(mouseX,mouseY,0)); //Get the tail of the ray (a virtual line)
Vector whead = bd->SW(Vector(mouseX,mouseY,10000.0)); //Get the head of the ray (a virtual line)
Vector otail = (!obj->GetMg()) * wtail;
Vector oray = (whead - wtail) ^ (!obj->GetMg());
rc->Intersect(otail, !oray, 10000.0); //Checks to see if the ray(virtual line) running between the tail and the head intersects the object
GeRayColResult colliderResults; //Create a struct to hold the things the ray collides with
if (rc->GetNearestIntersection(&colliderResults)) //If an intersection (closest to the start of the ray) is true
{
faceID = colliderResults.face_id; //Gets the polygons Index#
hitPos = colliderResults.hitpos; //Position of the intersection
distance = colliderResults.distance; //The distance from the camera to the mouse's position on the object(good for checking distances)
backface = colliderResults.backface; //tests if the polygon is facing towards or away from the camera
faceNormal = colliderResults.f_normal;
LONG triId = colliderResults.tri_face_id;
bCoords.x = colliderResults.barrycoords.x;
bCoords.y = colliderResults.barrycoords.y;
bCoords.z = colliderResults.barrycoords.z;
//////// Now I get the shader that's in the materials color channel ///////////
//Get the shader in the color channel
BaseMaterial *mat = doc->GetFirstMaterial();
if (!mat) return TOOLDRAW_0;
BaseShader *shdr = mat->GetFirstShader();
if (!shdr) return TOOLDRAW_0;
//Render the shader
InitRenderStruct irs;
shdr->InitRender(irs);
ChannelData cd;
cd.p = Vector(0, 0, 0);
cd.n = Vector(0, 0, 1);
cd.d = Vector(0, 0, 0);
cd.t = doc->GetTime().Get();
cd.texflag = 0;
cd.vd = NULL;
cd.off = 0.0;
cd.scale = 0.0;
////////// Now I put it all together and try to sample the UV's ///////////
PolygonObject *pObj = (PolygonObject* )obj;
if (!pObj) return TOOLDRAW_0;
CPolygon poly = pObj->GetPolygonW()[0];
Vector pnta = pObj->GetPointW()[poly.a];
Vector pntb = pObj->GetPointW()[poly.b];
Vector pntc = pObj->GetPointW()[poly.c];
Vector pntd = pObj->GetPointW()[poly.d];
//Here is where the problems start
//I'm trying to convert the barycentric values to UV values (0,1)
//With (0,0) being at the top left corner
//This does not work properly
//This will accurately sample the shader's colors if the mouse is clicked in the middle of the polygon
//But not if I click the mouse anywhere else on the polygon!!!
cd.p = bCoords; //Make the UVs match the barycentric coords?
Vector color = shdr->Sample(&cd); //Sample the shader's colors using the barycentric coords
GePrint(RealToString(color.x) + "," + RealToString(color.y) + "," + RealToString(color.z));
//This also does not work properly
//if triId == (polyIndex+1) then the barrycentric coordinates correspond to a triangle or to the first triangle of a quad polygon
//Otherwise it corresponds to the second triangle of a quad polygon
Vector UV;
if (triId == (faceID + 1)) UV = pnta * (1.0 - bCoords.x - bCoords.y) + pntc*bCoords.x + pntb*bCoords.y;
else UV = pnta * (1.0 - bCoords.x - bCoords.y) + pntc*bCoords.x + pntb*bCoords.y;
Vector UVn = UV.GetNormalized();
GePrint(RealToString(UVn.x) + " , " + RealToString(UVn.y) + " , " + RealToString(UVn.z));
//How the heck do we convert barycentric coords to UV coords?
shdr->FreeRender();
}
-ScottA