I have a situation with two bodies: let's suppose one of them is in T-pose and the other is idle, with its arms hanging down.
I have to find which rotation must be applied to the elbow, for example, to match the first body's elbow-to-hand bone with the second body's elbow-to-hand bone.
I thought about taking three vertices (the bodies have almost the same meshes, so that's not a problem, but I cannot use the vertex normals because they're not the same).
So I have three points forming a triangle on the first body and three points forming another triangle on the second body, and I want to find which rotation needs to be applied to one of the triangles to match the other.
If it helps, I'm doing this in Unity with C#.
EDIT: IMPORTANT! The triangles might not have the same dimensions, but they are planar, so I only need to know the rotation.
If I understand your problem correctly, you need to find the transformation between two configurations of the same triangle in 2D: {v1, v2, v3} and {w1, w2, w3}. I assume you know the vertex correspondence, for example v1->w1, v2->w2, v3->w3.
Here is what you can do:
1. Find the rotation angle between the two configurations.
2. Rotate a point, for example v1, and obtain rotatedV1.
3. Find the translation between rotatedV1 and w1.
Here is the code. For the sake of simplicity, I used the WPF struct Vector3D. The final result is a plain double array holding a 4x4 transformation matrix.
static void miniTest() {
    // first triangle
    Vector3D v1 = new Vector3D();
    Vector3D v2 = new Vector3D(2, 0, 0);
    Vector3D v3 = new Vector3D(2, 3, 0);
    // second triangle
    Vector3D w1 = new Vector3D(0, -1, 0);
    Vector3D w2 = new Vector3D(0, -3, 0);
    Vector3D w3 = new Vector3D(3, -1, 0);
    double[,] transformation = getTrMatrix(v1, v2, v3, w1, w2, w3);
}
public static double[,] getTrMatrix(Vector3D V1, Vector3D V2, Vector3D V3, Vector3D W1, Vector3D W2, Vector3D W3) {
    // direction of one edge in each configuration
    Vector3D s1 = V2 - V1;
    s1.Normalize();
    Vector3D z1 = W2 - W1;
    z1.Normalize();
    // rotation angle between the two edge directions (unsigned)
    double angle = Math.Acos(Vector3D.DotProduct(s1, z1));
    double[,] rotT = new double[,] {
        { Math.Cos(angle), -Math.Sin(angle), 0 },
        { Math.Sin(angle),  Math.Cos(angle), 0 },
        { 0, 0, 1 },
    };
    double[] rotatedV1 = multiply(rotT, V1);
    Vector3D translation = new Vector3D(W1.X - rotatedV1[0], W1.Y - rotatedV1[1], W1.Z - rotatedV1[2]);
    double[,] T = new double[,] {
        { Math.Cos(angle), -Math.Sin(angle), 0, translation.X },
        { Math.Sin(angle),  Math.Cos(angle), 0, translation.Y },
        { 0, 0, 1, translation.Z },
        { 0, 0, 0, 1 } };
    return T;
}
// apply a 3x3 rotation matrix to a vector
static double[] multiply(double[,] rotMat, Vector3D vec) {
    double[] result = new double[3];
    result[0] = rotMat[0, 0] * vec.X + rotMat[0, 1] * vec.Y + rotMat[0, 2] * vec.Z;
    result[1] = rotMat[1, 0] * vec.X + rotMat[1, 1] * vec.Y + rotMat[1, 2] * vec.Z;
    result[2] = rotMat[2, 0] * vec.X + rotMat[2, 1] * vec.Y + rotMat[2, 2] * vec.Z;
    return result;
}
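Since the question is about Unity, here is a minimal Unity-flavored sketch of the same idea (not part of the answer above; the identifiers are illustrative, and it assumes both triangles' vertices are already expressed in a common space): build an orientation from one edge plus the triangle normal for each triangle, then take the delta rotation.
using UnityEngine;

static class TriangleRotation
{
    // Returns the rotation that takes triangle (a1, a2, a3) onto triangle (b1, b2, b3),
    // assuming matching vertex order and ignoring any difference in size.
    public static Quaternion FromTriangles(Vector3 a1, Vector3 a2, Vector3 a3,
                                           Vector3 b1, Vector3 b2, Vector3 b3)
    {
        Quaternion qa = OrientationOf(a1, a2, a3);
        Quaternion qb = OrientationOf(b1, b2, b3);
        return qb * Quaternion.Inverse(qa); // delta rotation: frame A -> frame B
    }

    static Quaternion OrientationOf(Vector3 p1, Vector3 p2, Vector3 p3)
    {
        Vector3 edge = (p2 - p1).normalized;              // one edge of the triangle
        Vector3 normal = Vector3.Cross(p2 - p1, p3 - p1); // triangle normal
        return Quaternion.LookRotation(edge, normal);     // orthonormal frame from edge + normal
    }
}
Quaternion.FromToRotation(a2 - a1, b2 - b1) alone would only align one edge and would leave the twist about that edge undetermined, which is why the sketch builds a full frame from the edge and the normal.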
I'm new to 3D programming and am having a terrible time getting my texture to fill my meshes properly. I've got it sizing correctly on the walls but the texture on the roof is running on an angle and is stretched out too far.
I have several methods to create the mesh, but they all eventually call the AddTriangle method, where the TextureCoordinates are set.
public static void AddTriangle(this MeshGeometry3D mesh, Point3D[] pts)
{
    // Create the points.
    int index = mesh.Positions.Count;
    foreach (Point3D pt in pts)
    {
        mesh.Positions.Add(pt);
        mesh.TriangleIndices.Add(index++);
        mesh.TextureCoordinates.Add(new Point(pt.X + pt.Z, 0 - pt.Y));
    }
}
Here is how my material is set up.
imageBrush.ImageSource = new BitmapImage(new Uri("pack://application:,,,/Textures/shingles1.jpg"));
imageBrush.TileMode = TileMode.Tile;
imageBrush.ViewportUnits = BrushMappingMode.Absolute;
imageBrush.Viewport = new Rect(0, 0, 25, 25);
SidingColor = new DiffuseMaterial(imageBrush);
SidingColor.Color = RGB(89, 94, 100);
My texture looks like this:
And here are the results I'm getting.
That's as close as I could get after hours of fooling around and googling.
Whew, that was a little more difficult than I anticipated.
Here are a few resources that helped me find a solution.
How to convert a 3D point on a plane to UV coordinates?
From the link below I realized the above formula was correct, but for a right-handed coordinate system. I converted it, and that was the final step.
http://www.math.tau.ac.il/~dcor/Graphics/cg-slides/geom3d.pdf
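For anyone following along: the formula from that first link boils down to picking two orthonormal axes U and V lying in the triangle's plane and projecting each vertex P onto them, u = P · U and v = P · V (my paraphrase, not a quote from the link). The code below builds U and V from the triangle's normal and then applies exactly those two dot products, with the point's components swizzled to account for the handedness conversion mentioned above.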
Here is the code that works in case someone else has this question.
public static void AddTriangle(this MeshGeometry3D mesh, Point3D[] pts)
{
    if (pts.Count() != 3) return;
    // use the three points of the triangle to calculate the normal (angle of the surface)
    Vector3D normal = CalculateNormal(pts[0], pts[1], pts[2]);
    normal.Normalize();
    // calculate the uv basis vectors
    Vector3D u;
    if (normal.X == 0 && normal.Z == 0) u = new Vector3D(normal.Y, -normal.X, 0);
    else u = new Vector3D(normal.X, -normal.Z, 0);
    u.Normalize();
    Vector3D n = new Vector3D(normal.Z, normal.X, normal.Y);
    Vector3D v = Vector3D.CrossProduct(n, u);
    int index = mesh.Positions.Count;
    foreach (Point3D pt in pts)
    {
        // add the points to create the triangle
        mesh.Positions.Add(pt);
        mesh.TriangleIndices.Add(index++);
        // apply the uv texture positions
        double u_coor = Vector3D.DotProduct(u, new Vector3D(pt.Z, pt.X, pt.Y));
        double v_coor = Vector3D.DotProduct(v, new Vector3D(pt.Z, pt.X, pt.Y));
        mesh.TextureCoordinates.Add(new Point(u_coor, v_coor));
    }
}
private static Vector3D CalculateNormal(Point3D firstPoint, Point3D secondPoint, Point3D thirdPoint)
{
    var u = new Point3D(firstPoint.X - secondPoint.X,
                        firstPoint.Y - secondPoint.Y,
                        firstPoint.Z - secondPoint.Z);
    var v = new Point3D(secondPoint.X - thirdPoint.X,
                        secondPoint.Y - thirdPoint.Y,
                        secondPoint.Z - thirdPoint.Z);
    return new Vector3D(u.Y * v.Z - u.Z * v.Y, u.Z * v.X - u.X * v.Z, u.X * v.Y - u.Y * v.X);
}
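In case it helps, a hypothetical usage sketch (the mesh and points are made up for illustration; any triangle works the same way):
var mesh = new MeshGeometry3D();
// one roof-like triangle; the UVs now come from the triangle's own plane,
// so the texture no longer depends on which way the surface faces
mesh.AddTriangle(new[]
{
    new Point3D(0, 3, 0),
    new Point3D(4, 3, 0),
    new Point3D(2, 5, 2)
});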
I need to do a Boolean subtraction between two models in C#. One of the meshes will be entirely within the other mesh, so I was hoping to reverse the normals of the inner model and add the two models together. I am at a loss on how to invert the normals, though.
This is how I'm calculating a surface normal:
//creates surface normals
Vector3D CalculateSurfaceNormal(Point3D p1, Point3D p2, Point3D p3)
{
    Vector3D v1 = new Vector3D(0, 0, 0); // Vector 1 (x,y,z) & Vector 2 (x,y,z)
    Vector3D v2 = new Vector3D(0, 0, 0);
    Vector3D normal = new Vector3D(0, 0, 0);
    // Finds The Vector Between 2 Points By Subtracting
    // The x,y,z Coordinates From One Point To Another.
    // Calculate The Vector From Point 2 To Point 1
    v1.X = p1.X - p2.X;
    v1.Y = p1.Y - p2.Y;
    v1.Z = p1.Z - p2.Z;
    // Calculate The Vector From Point 3 To Point 2
    v2.X = p2.X - p3.X;
    v2.Y = p2.Y - p3.Y;
    v2.Z = p2.Z - p3.Z;
    // Compute The Cross Product To Give Us A Surface Normal
    normal.X = v1.Y * v2.Z - v1.Z * v2.Y; // Cross Product For Y - Z
    normal.Y = v1.Z * v2.X - v1.X * v2.Z; // Cross Product For X - Z
    normal.Z = v1.X * v2.Y - v1.Y * v2.X; // Cross Product For X - Y
    normal.Normalize();
    return normal;
}
I was advised to reverse the normal by negating it:
n = CalculateSurfaceNormal(p1, p2, p3);
n = new Vector3D(-1 * n.X, -1 * n.Y, -1 * n.Z);
I find the values are negated, but when I view the model in a 3D program, there is no change in the model.
Another suggestion was to try backface culling by changing the order of the vectors. I tried this by swapping the order of v1 and v2:
//creates inverted surface normals
Vector3D CalculateInvertedSurfaceNormal(Point3D p1, Point3D p2, Point3D p3)
{
    Vector3D v1 = new Vector3D(0, 0, 0); // Vector 1 (x,y,z) & Vector 2 (x,y,z)
    Vector3D v2 = new Vector3D(0, 0, 0);
    Vector3D normal = new Vector3D(0, 0, 0);
    // Finds The Vector Between 2 Points By Subtracting
    // The x,y,z Coordinates From One Point To Another.
    // Calculate The Vector From Point 2 To Point 1
    v2.X = p1.X - p2.X;
    v2.Y = p1.Y - p2.Y;
    v2.Z = p1.Z - p2.Z;
    // Calculate The Vector From Point 3 To Point 2
    v1.X = p2.X - p3.X;
    v1.Y = p2.Y - p3.Y;
    v1.Z = p2.Z - p3.Z;
    // Compute The Cross Product To Give Us A Surface Normal
    normal.X = v1.Y * v2.Z - v1.Z * v2.Y; // Cross Product For Y - Z
    normal.Y = v1.Z * v2.X - v1.X * v2.Z; // Cross Product For X - Z
    normal.Z = v1.X * v2.Y - v1.Y * v2.X; // Cross Product For X - Y
    normal.Normalize();
    return normal;
}
No change in the model.
Here is the whole code:
private void SaveMoldMeshtoStlFile(MeshGeometry3D mesh, string filename)
{
    if (mesh == null)
        return;
    if (File.Exists(filename))
    {
        File.SetAttributes(filename, FileAttributes.Normal);
        File.Delete(filename);
    }
    Point3DCollection vertexes = mesh.Positions;
    Int32Collection indexes = mesh.TriangleIndices;
    Point3D p1, p2, p3;
    Vector3D n;
    string text;
    using (TextWriter writer = new StreamWriter(filename))
    {
        writer.WriteLine("solid Bolus");
        for (int v = 0; v < mesh.TriangleIndices.Count(); v += 3)
        {
            // gather the 3 points for the face and the normal
            p1 = vertexes[indexes[v]];
            p2 = vertexes[indexes[v + 1]];
            p3 = vertexes[indexes[v + 2]];
            n = CalculateInvertedSurfaceNormal(p1, p2, p3);
            text = string.Format("facet normal {0} {1} {2}", n.X, n.Y, n.Z);
            writer.WriteLine(text);
            writer.WriteLine("outer loop");
            text = String.Format("vertex {0} {1} {2}", p1.X, p1.Y, p1.Z);
            writer.WriteLine(text);
            text = String.Format("vertex {0} {1} {2}", p2.X, p2.Y, p2.Z);
            writer.WriteLine(text);
            text = String.Format("vertex {0} {1} {2}", p3.X, p3.Y, p3.Z);
            writer.WriteLine(text);
            writer.WriteLine("endloop");
            writer.WriteLine("endfacet");
        }
        // close the solid (the ASCII STL format expects a matching "endsolid")
        writer.WriteLine("endsolid Bolus");
    }
}
//creates inverted surface normals
Vector3D CalculateInvertedSurfaceNormal(Point3D p1, Point3D p2, Point3D p3)
{
    Vector3D v1 = new Vector3D(0, 0, 0); // Vector 1 (x,y,z) & Vector 2 (x,y,z)
    Vector3D v2 = new Vector3D(0, 0, 0);
    Vector3D normal = new Vector3D(0, 0, 0);
    // Finds The Vector Between 2 Points By Subtracting
    // The x,y,z Coordinates From One Point To Another.
    // Calculate The Vector From Point 2 To Point 1
    v2.X = p1.X - p2.X;
    v2.Y = p1.Y - p2.Y;
    v2.Z = p1.Z - p2.Z;
    // Calculate The Vector From Point 3 To Point 2
    v1.X = p2.X - p3.X;
    v1.Y = p2.Y - p3.Y;
    v1.Z = p2.Z - p3.Z;
    // Compute The Cross Product To Give Us A Surface Normal
    normal.X = v1.Y * v2.Z - v1.Z * v2.Y; // Cross Product For Y - Z
    normal.Y = v1.Z * v2.X - v1.X * v2.Z; // Cross Product For X - Z
    normal.Z = v1.X * v2.Y - v1.Y * v2.X; // Cross Product For X - Y
    normal.Normalize();
    return normal;
}
Is there an error with my code? Am I missing something? I've tried out the exported models in a few different programs, and all are showing the exported model still has the normals facing outwards. I tried flipping the normals in Blender and found the other programs also showed the normals flipped, so I'm fairly sure it's a problem with my program.
Figured out the solution.
The order of points for each triangle is crucial. If the order of points doesn't support the normal's direction, I'm finding other programs will automatically correct the normal.
Where I had this:
//gather the 3 points for the face and the normal
p1 = vertexes[indexes[v]];
p2 = vertexes[indexes[v + 1]];
p3 = vertexes[indexes[v + 2]];
n = CalculateInvertedSurfaceNormal(p1, p2, p3);
I instead reversed the order of the points by changing to this:
//gather the 3 points for the face and the normal
p3 = vertexes[indexes[v]];
p2 = vertexes[indexes[v + 1]];
p1 = vertexes[indexes[v + 2]];
n = CalculateInvertedSurfaceNormal(p1, p2, p3);
That solved my problem.
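An alternative, if you would rather flip the facets once on the mesh itself instead of while writing the file: reversing each triangle's winding order directly in TriangleIndices has the same effect (a sketch, assuming an unfrozen MeshGeometry3D):
static void FlipWinding(MeshGeometry3D mesh)
{
    // swapping the first and last index of each triangle reverses its winding,
    // which is what most programs use to decide which side of the face is "outside"
    Int32Collection indices = mesh.TriangleIndices;
    for (int i = 0; i < indices.Count; i += 3)
    {
        int tmp = indices[i];
        indices[i] = indices[i + 2];
        indices[i + 2] = tmp;
    }
}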
I am trying to ascertain the X, Y, Z angles held within a matrix by decomposing it. I am using .NET 4.5 and C#.
I created a test to check the following:
If I create a Matrix4x4 with identity values only
Rotate the matrix by 45 degrees
Decompose the matrix and evaluate the quaternion returned (gives the x,y,z angles)
Check that the X value out matches the 45 degrees put in
I get the following results:
X:0.5 Y:0 Z:0
I was expecting:
X:0.45 Y:0 Z:0
Test Code
Quaternion quatDecomposed;
Vector3D translation;
Matrix4x4 rot = Matrix4x4.RotationAroundX(45);
rot.DecomposeNoScaling(out quatDecomposed, out translation);
I have created my own Matrix4x4, Vector3D and Angle3D structures shown in the examples below.
My Matrix4x4 rotate around x method is as follows:
public static Matrix4x4 RotationAroundX(double degrees)
{
    // [1, 0,   0,    0]
    // [0, cos, -sin, 0]
    // [0, sin, cos,  0]
    // [0, 0,   0,    1]
    // convert degrees to radians.
    double radians = DoubleExtensions.DegreesToRadians(degrees);
    // return matrix.
    var matrixTransformed = Matrix4x4.Identity;
    matrixTransformed.M22 = (float)Math.Cos(radians);
    matrixTransformed.M23 = (float)-(Math.Sin(radians));
    matrixTransformed.M32 = (float)Math.Sin(radians);
    matrixTransformed.M33 = (float)Math.Cos(radians);
    //return matrix;
    return matrixTransformed;
}
My decompose no scaling method is as follows:
public void DecomposeNoScaling(out Quaternion rotation, out Vector3D translation)
{
    translation.X = this[1, 4];
    translation.Y = this[2, 4];
    translation.Z = this[3, 4];
    rotation = new Quaternion(new Matrix3x3(this));
}
What I am looking to get out is the angles contained within the Matrix4x4. I do this as follows:
Angle3D angles = new Angle3D(quatDecomposed.X, quatDecomposed.Y, quatDecomposed.Z);
Can anyone spot what I'm doing wrong? What I am REALLY trying to work out is the Euler angles from the matrix4x4 in ZYX order.
Thanks in advance!
Shouldn't the last element of the matrix (last row, last column) be 1?
[1  0    0    0]
[0  cos  -sin 0]
[0  sin  cos  0]
[0  0    0    1]
Just in case anyone else needs to know, this is how I get the Euler angles directly from the Matrix:
public static Angle3D GetAngles(Matrix4x4 source)
{
    double thetaX, thetaY, thetaZ = 0.0;
    thetaX = Math.Asin(source.M32);
    if (thetaX < (Math.PI / 2))
    {
        if (thetaX > (-Math.PI / 2))
        {
            thetaZ = Math.Atan2(-source.M12, source.M22);
            thetaY = Math.Atan2(-source.M31, source.M33);
        }
        else
        {
            thetaZ = -Math.Atan2(-source.M13, source.M11);
            thetaY = 0;
        }
    }
    else
    {
        thetaZ = Math.Atan2(source.M13, source.M11);
        thetaY = 0;
    }
    // Create return object.
    Angle3D angles = new Angle3D(thetaX, thetaY, thetaZ);
    // Convert to degrees.
    angles.Format = AngleFormat.Degrees;
    // Return angles.
    return angles;
}
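As a quick sanity check of GetAngles against the rotation from the test above (a sketch, assuming the custom Matrix4x4 and Angle3D types shown earlier):
// build a 45-degree rotation about X and read the angles back out
Matrix4x4 rot = Matrix4x4.RotationAroundX(45);
Angle3D angles = GetAngles(rot);
// angles should come back as roughly (45, 0, 0) degrees. Note that the raw
// quaternion components are not angles: for a rotation of θ about X the
// quaternion is (sin(θ/2), 0, 0, cos(θ/2)).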
I am working with point3D and vector3D classes and I need some help adjusting a point by a given distance.
Point A - point residing at coordinate 0,0,0.
Point B - point residing at coordinate 1,1,1.
Vector AB - the vector from A to B, whose length tells me the distance between the two points A and B is 1.73205078.
Code:
Point3D A = new Point3D { X = 0, Y = 0, Z = 0 };
Point3D B = new Point3D { X = 1, Y = 1, Z = 1 };
Vector3D AtoB = A - B;
Double distanceBetweenAandB = AtoB.Length; // the distance will be 1.73205078 here.
I would like to adjust point B. I would like to reduce the distance between point A and point B to 0.5 instead of 1 (adjusting to position C as shown in the diagram). I am trying to work out how to do this.
Point A (0,0,0) is known, point B (1,1,1) is known and the distance to adjust by is known (0.5). How do I calculate?
Pseudo code:
Point3D A = new Point3D { X = 0, Y = 0, Z = 0 };
Point3D B = new Point3D { X = 1, Y = 1, Z = 1 };
Double distanceToAdjust = 0.5;
Point3D newCoordinate = B - distanceToAdjust; // this doesn't work!
Adjusted point B shown in diagram below:
I am using my own defined Point3D class and Vector3D class.
Let's take your given points, create a third one, which we'll call newCoordinate, and use point A as the reference:
Point3D A = new Point3D { X = 0, Y = 0, Z = 0 };
Point3D B = new Point3D { X = 1, Y = 1, Z = 1 };
Double distanceToAdjust = 0.5;
Point3D newCoordinate = new Point3D {
    X = A.X + ((B.X - A.X) * distanceToAdjust),
    Y = A.Y + ((B.Y - A.Y) * distanceToAdjust),
    Z = A.Z + ((B.Z - A.Z) * distanceToAdjust)
};
Here we see the original points:
Assuming these values, newCoordinate would sit at X=0.5, Y=0.5, Z=0.5. A nice graph follows:
There it is, sitting right in between the two original points.
As a simulation, if you change A and B and assume these values instead:
Point3D A = new Point3D { X = -8, Y = 4, Z = 3 };
Point3D B = new Point3D { X = 3, Y = 2, Z = 1 };
Then newCoordinate position would be X=-2.5, Y=3, Z=2.
Now, same points, but using distanceToAdjust = 1.2:
Keep these two things in mind:
Changes in distance always need a reference point. In my sample, I assumed A; that's why it appears as the first portion of each newCoordinate parameter initialization.
distanceToAdjust was taken as a multiplier factor.
Addendum: The nifty tool I used to help visualization can be found here.
Assuming you implemented vector operations:
if point A is always [0,0,0]
Point3D newCoord = B.Normalize() * distance;
for any two points
Point3D newCoord = A + ((B - A).Normalize() * distance); //move to origin, normalize, scale and move back
Not the fastest solution, though.
"the length between the two points A and B is distance = 1"
No, the distance is the square root of three, about 1.732.
The distance from (0,0,0) to (0,0,1) is 1. The distance from (0,0,0) to (0,1,1) is the square root of two. (Think of a right triangle in two dimensions and the Pythagorean theorem.) The distance from (0,0,0) to (1,1,1) is the square root of three. (Think of another right triangle, lying in the plane that contains the previous hypotenuse and the remaining axis: AB = √(1² + (√2)²) = √3.)
I assume that you don't want to subtract 0.5 from anything, but actually multiply the distance by 0.5, i.e. get halfway from A to B. You can calculate the point C by taking that fraction of the distance between point A and point B in each dimension:
Point3D C = new Point3D {
    X = A.X + (B.X - A.X) * distanceToAdjust,
    Y = A.Y + (B.Y - A.Y) * distanceToAdjust,
    Z = A.Z + (B.Z - A.Z) * distanceToAdjust
};
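To check with the question's numbers (A = (0,0,0), B = (1,1,1), distanceToAdjust = 0.5): C.X = 0 + (1 − 0) · 0.5 = 0.5, and likewise for Y and Z, so C = (0.5, 0.5, 0.5), which lies at half the original distance from A (0.5 · √3 ≈ 0.866).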
In pseudo code, here's how I ended up implementing it:
pointA = …
pointB = …
vectorAB = B - A
desiredDistance = 0.5; // the fraction of vectorAB's length to keep
vectorAC = vectorAB * desiredDistance;
pointC = A + vectorAC;
Actual code:
Vector3D pointC = (Vector3D)(A + (float)desiredDistance * (B - A));
I'm unsure if this is what you would need but is it possible to create a method within your Point3D class to allow subtraction/addition?
(Just guessing at the Point3D class, kept as simple as it could be.) Something like:
public class Point3D
{
    public double X, Y, Z;
    public void ChangeCord(Point3D point)
    {
        X -= point.X;
        Y -= point.Y;
        Z -= point.Z;
    }
}
So it could just be:
Point3D A = new Point3D { X = 0, Y = 0, Z = 0 };
Point3D B = new Point3D { X = 1, Y = 1, Z = 1 };
Double distanceToAdjust = 0.5;
B.ChangeCord(new Point3D { X = 0.5, Y = 0.5, Z = 0.5 });
Point3D newCoordinate = B;
How can I calculate an arc through 3 points A, B, C in 3D, going from A to C and passing through B (the order is taken care of)?
Most robot arms have this kind of move command. I need to simulate it, apply different speed dynamics to it, and therefore need a parameter 0..1 which moves a position from A to C.
EDIT:
What I have is the radius and center of the arc, but how can I parameterize the circle in 3D if I know the start and end angles?
EDIT2:
Getting closer. If I have two unit-length perpendicular vectors v1 and v2 in the plane in which the circle lies, I can do a parameterization like: x(t) = c + r * cos(t) * v1 + r * sin(t) * v2
So I take v1 = a - c (normalized) and only need to find v2 now. Any ideas?
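For reference, one way to get v2 is to cross the plane normal with v1 (a sketch using System.Numerics types; A, B, C are the three points and center is the circle center computed earlier, so none of these names come from the answers below):
using System.Numerics;

static class ArcBasis
{
    // Returns two unit-length, mutually perpendicular vectors spanning the plane
    // of the circle through A, B, C, with v1 pointing from the center toward A.
    public static (Vector3 v1, Vector3 v2) InPlaneAxes(Vector3 A, Vector3 B, Vector3 C, Vector3 center)
    {
        Vector3 n = Vector3.Normalize(Vector3.Cross(B - A, C - A)); // normal of the plane through A, B, C
        Vector3 v1 = Vector3.Normalize(A - center);                 // first in-plane axis: center -> A
        Vector3 v2 = Vector3.Cross(n, v1);                          // second axis: already unit length, since n is perpendicular to v1
        return (v1, v2);
    }
}
With that pair, x(t) = c + r * cos(t) * v1 + r * sin(t) * v2 starts at A for t = 0, matching the parameterization above.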
Martin Doms recently wrote a blog entry about splines and bezier curves that you might find useful.
Part of his post describes how to get a 2D curve defined by the three control points P0, P1, and P2. The curve is parameterized by a value t that ranges from 0 to 1:
F(t) = (1-t)² P0 + 2t(1-t) P1 + t² P2
It seems likely that you could adapt that to 3D with a little thought. (Of course, a Bézier curve doesn't generally pass through its middle control point, so this may not work if that's a deal-breaker for you.)
As an aside, Jason Davies put together a nice little animation of curve interpolation.
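For what it's worth, that formula carries over to 3D almost verbatim; here is a small sketch (using System.Numerics, with the same P0, P1, P2 naming as above):
using System.Numerics;

static class QuadraticBezier
{
    // F(t) = (1-t)^2 * P0 + 2t(1-t) * P1 + t^2 * P2, for t in [0, 1]
    public static Vector3 Evaluate(Vector3 p0, Vector3 p1, Vector3 p2, float t)
    {
        float u = 1f - t;
        return u * u * p0 + 2f * u * t * p1 + t * t * p2;
    }
}
Keep in mind this is not a circular arc and the speed along the curve is not uniform in t; it is only the 3D version of the formula above.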
Got back to this one and it was quite tricky. The code is as short as possible, but still much more than I thought.
You can create an instance of this class and call the SolveArc method with the 3 positions (in the right order) to set up the class. Then the Arc method will give you positions on the circular arc for t from 0 to 1 at constant velocity. If you find a shorter solution, please let me know.
class ArcSolver
{
    public Vector3D Center { get; private set; }
    public double Radius { get; private set; }
    public double Angle { get; private set; }
    Vector3D FDirP1, FDirP2;

    //get arc position at t [0..1]
    public Vector3D Arc(double t)
    {
        var x = t * Angle;
        return Center + Radius * Math.Cos(x) * FDirP1 + Radius * Math.Sin(x) * FDirP2;
    }

    //Set the points, the arc will go from P1 to P3 through P2.
    public bool SolveArc(Vector3D P1, Vector3D P2, Vector3D P3)
    {
        //to read this code you need to know that the Vector3D struct has
        //many overloaded operators:
        //~ normalize
        //| dot product
        //& cross product, left handed
        //! length
        Vector3D O = (P2 + P3) / 2;
        Vector3D C = (P1 + P3) / 2;
        Vector3D X = (P2 - P1) / -2;
        Vector3D N = (P3 - P1).CrossRH(P2 - P1);
        Vector3D D = ~N.CrossRH(P2 - O);
        Vector3D V = ~(P1 - C);
        double check = D | V;
        Angle = Math.PI;
        var exist = false;
        if (check != 0)
        {
            double t = (X | V) / check;
            Center = O + D * t;
            Radius = !(Center - P1);
            Vector3D V1 = ~(P1 - Center);
            //vector from center to P1
            FDirP1 = V1;
            Vector3D V2 = ~(P3 - Center);
            Angle = Math.Acos(V1 | V2);
            if (Angle != 0)
            {
                exist = true;
                V1 = P2 - P1;
                V2 = P2 - P3;
                if ((V1 | V2) > 0)
                {
                    Angle = Math.PI * 2 - Angle;
                }
            }
        }
        //second in-plane basis vector, perpendicular to FDirP1
        FDirP2 = ~(-N.CrossRH(P1 - Center));
        return exist;
    }
}
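A hypothetical usage sketch (the points are the same ones used in the Mathematica answer below; SolveArc returns false when it cannot build a valid arc, so the result is checked first, and the custom Vector3D struct is assumed to have an (x, y, z) constructor):
var solver = new ArcSolver();
var p1 = new Vector3D(0, 0, 0);
var p2 = new Vector3D(5, 3, 7);
var p3 = new Vector3D(6, 4, 2);
if (solver.SolveArc(p1, p2, p3))
{
    // sample the arc at eleven evenly spaced parameter values, from p1 (t = 0) to p3 (t = 1)
    for (int i = 0; i <= 10; i++)
    {
        Vector3D pos = solver.Arc(i / 10.0);
        Console.WriteLine(pos);
    }
}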
So this answer is part of the story, given that the code is in Mathematica rather than C#, but certainly all of the maths (with perhaps one minor exception) should be relatively easy to translate to any language.
The basic approach presented is to:
Project the three points (A, B, C) onto the plane that those points lie in. It should have a normal AB x BC. This reduces the problem from three dimensions to two.
Use your favourite technique for finding the center of the circle that passes through the three projected points.
Unproject the center of the circle back into three dimensions.
Use an appropriate spherical interpolation strategy (slerp is used in the sample, but I believe it would have been better to use quaternions).
The one caveat is that you need to work out which direction you are rotating in. I'm sure there are smarter ways, but with only two alternatives, rejection testing is sufficient. I'm using Reduce, but you'd probably need to do something slightly different to do this in C#.
No guarantees that this is the most numerically stable or robust way to do this, or that there are any corner cases that have been missed.
(* Perpendicular vector in 2 dimensions *)
Perp2d[v_] := {-v[[2]], v[[1]]};
(* Spherical linear interpolation. From wikipedia: http://en.wikipedia.org/wiki/Slerp *)
slerp[p0_, p1_, t_, rev_] :=
Module[{\[CapitalOmega], v},
\[CapitalOmega] = ArcCos[Dot[p0, p1]];
\[CapitalOmega] =
If[rev == 0, 2 Pi - \[CapitalOmega], \[CapitalOmega]];
v = (Sin[(1 - t) \[CapitalOmega]]/
Sin[\[CapitalOmega]]) p0 + (Sin[t \[CapitalOmega]]/
Sin[\[CapitalOmega]]) p1;
Return[v]
];
(* Based on the expressions from mathworld: http://mathworld.wolfram.com/Line-LineIntersection.html *)
IntersectionLineLine[{x1_, y1_}, {x2_, y2_}, {x3_, y3_}, {x4_, y4_}] :=
Module[{x, y, A, B, C},
A = Det[{{x1, y1}, {x2, y2}}];
B = Det[{{x3, y3}, {x4, y4}}];
C = Det[{{x1 - x2, y1 - y2}, {x3 - x4, y3 - y4}}];
x = Det[{{A, x1 - x2}, {B, x3 - x4}}]/C;
y = Det[{{A, y1 - y2}, {B, y3 - y4}}]/C;
Return[{x, y}]
]
(* Based on Paul Bourke's notes: http://local.wasp.uwa.edu.au/~pbourke/geometry/circlefrom3/ *)
CircleFromThreePoints2D[v1_, v2_, v3_] :=
Module[{v12, v23, mid12, mid23, v12perp, v23perp, c, r},
v12 = v2 - v1;
v23 = v3 - v2;
mid12 = Mean[{v1, v2}];
mid23 = Mean[{v2, v3}];
c = IntersectionLineLine[
mid12, mid12 + Perp2d[v12],
mid23, mid23 + Perp2d[v23]
];
r = Norm[c - v1];
Assert[r == Norm[c - v2]];
Assert[r == Norm[c - v3]];
Return[{c, r}]
]
(* Projection from 3d to 2d *)
CircleFromThreePoints3D[v1_, v2_, v3_] :=
Module[{v12, v23, vnorm, b1, b2, va, vb, vc, xc, xr, yc, yr},
v12 = v2 - v1;
v23 = v3 - v2;
vnorm = Cross[v12, v23];
b1 = Normalize[v12];
b2 = Normalize[Cross[v12, vnorm]];
va = {0, 0};
vb = {Dot[v2, b1], Dot[v2, b2]};
vc = {Dot[v3, b1], Dot[v3, b2]};
{xc, xr} = CircleFromThreePoints2D[va, vb, vc];
yc = xc[[1]] b1 + xc[[2]] b2;
yr = Norm[v1 - yc];
Return[{yc, yr, b1, b2}]
]
v1 = {0, 0, 0};
v2 = {5, 3, 7};
v3 = {6, 4, 2};
(* calculate the center of the circle, radius, and basis vectors b1 and b2 *)
{yc, yr, b1, b2} = CircleFromThreePoints3D[v1, v2, v3];
(* calculate the path of motion, given an arbitrary direction *)
path = Function[{t, d},
yc + yr slerp[(v1 - yc)/yr, (v3 - yc)/yr, t, d]];
(* correct the direction of rotation if necessary *)
dirn = If[
TrueQ[Reduce[{path[t, 1] == v2, t >= 0 && t <= 1}, t] == False], 0,
1]
(* Plot Results *)
gr1 = ParametricPlot3D[path[t, dirn], {t, 0.0, 1.0}];
gr2 = ParametricPlot3D[Circle3d[b1, b2, yc, yr][t], {t, 0, 2 Pi}];
Show[
gr1,
Graphics3D[Line[{v1, v1 + b1}]],
Graphics3D[Line[{v1, v1 + b2}]],
Graphics3D[Sphere[v1, 0.1]],
Graphics3D[Sphere[v2, 0.1]],
Graphics3D[{Green, Sphere[v3, 0.1]}],
Graphics3D[Sphere[yc, 0.2]],
PlotRange -> Transpose[{yc - 1.2 yr, yc + 1.2 yr}]
]
Which looks something like this: