using OpenCvSharp;
using System;
using System.Collections.Generic;
using System.IO;
/// <summary>
/// Calibrates a camera from chessboard images (OpenCV's standard pinhole model),
/// saves the intrinsics, undistorts a sample image, and reports reprojection error.
/// Port of the classic OpenCV Python calibration tutorial to OpenCvSharp4.
/// </summary>
class CameraCalibration
{
    static void Main()
    {
        // Number of INNER corners per chessboard row and column (not squares).
        Size chessboardSize = new Size(5, 8);
        Size frameSize = new Size(640, 480);
        // Stop corner refinement after 30 iterations or when movement < 0.001 px.
        TermCriteria criteria = new TermCriteria(CriteriaTypes.Eps | CriteriaTypes.MaxIter, 30, 0.001);
        // One shared set of 3D board coordinates; reused for every view.
        Mat objp = CreateObjectPoints(chessboardSize);
        List<Mat> objpoints = new List<Mat>(); // 3D points in board space, one Mat per accepted image
        List<Mat> imgpoints = new List<Mat>(); // matching 2D corner detections, one Mat per accepted image
        // BUGFIX: the verbatim string @"C:imagenes" was missing its backslash and
        // resolved to a drive-relative path; use the intended absolute directory.
        string[] images = Directory.GetFiles(@"C:\imagenes", "*.bmp");
        if (images.Length == 0)
        {
            Console.WriteLine("No se encontraron imágenes en el directorio especificado.");
            return;
        }
        foreach (string imageFile in images)
        {
            Console.WriteLine($"Procesando Imagen: {imageFile}");
            ProcessImage(imageFile, chessboardSize, criteria, objpoints, imgpoints, objp);
        }
        Cv2.DestroyAllWindows();
        if (objpoints.Count == 0 || imgpoints.Count == 0)
        {
            Console.WriteLine("No se encontraron esquinas válidas del tablero. No se puede realizar la calibración.");
            return;
        }
        // Initial guesses for the intrinsics; CalibrateCamera overwrites them.
        Mat cameraMatrix = Mat.Eye(3, 3, MatType.CV_64FC1);
        // BUGFIX: the original `new Mat(5, 1, CV_64FC1)` left the coefficients
        // uninitialized (indeterminate memory); start from all zeros.
        Mat distCoeffs = Mat.Zeros(5, 1, MatType.CV_64FC1);
        Mat[] rvecs, tvecs;
        // Run the calibration; rms is the overall RMS reprojection error in pixels.
        double rms = Cv2.CalibrateCamera(objpoints, imgpoints, frameSize, cameraMatrix, distCoeffs, out rvecs, out tvecs);
        Console.WriteLine($"RMS Error de Calibración: {rms}");
        PrintMatrix("Matriz de Cámara:", cameraMatrix);
        PrintMatrix("Coeficientes de Distorsión:", distCoeffs);
        // BUGFIX: restored the path separators lost in the original literals.
        SaveCalibrationData(cameraMatrix, distCoeffs, @"C:\imagenes\calibration_data.bin");
        UndistortImage(cameraMatrix, distCoeffs, frameSize, @"C:\imagenes\image_to_undistort.bmp", @"C:\imagenes\undistorted_image.bmp", @"C:\imagenes\remapped_image.bmp");
        double meanError = CalculateReprojectionError(objpoints, imgpoints, rvecs, tvecs, cameraMatrix, distCoeffs);
        Console.WriteLine($"Error Total: {meanError}");
        CalculateAndPrintDistance(objpoints, imgpoints, objp, cameraMatrix, distCoeffs, rvecs, tvecs);
    }

    /// <summary>
    /// Builds the ideal 3D coordinates of the chessboard inner corners, laid out
    /// on the Z=0 plane with a square size of 20 units (e.g. 20 mm per square).
    /// Returns an N×1 CV_32FC3 Mat where N = width*height.
    /// </summary>
    static Mat CreateObjectPoints(Size chessboardSize)
    {
        Mat objp = new Mat(chessboardSize.Height * chessboardSize.Width, 1, MatType.CV_32FC3);
        for (int i = 0; i < chessboardSize.Height; i++)
        {
            for (int j = 0; j < chessboardSize.Width; j++)
            {
                // BUGFIX: the original used Set<float>(row, 1, ...) and Set<float>(row, 2, ...)
                // on a 1-column matrix — columns 1 and 2 do not exist, so the X/Y/Z
                // channels were never written correctly and the calibration input was
                // corrupted (the most likely cause of the inaccurate results).
                // Write the whole 3-channel element at once instead.
                objp.Set(i * chessboardSize.Width + j, new Point3f(j * 20f, i * 20f, 0f));
            }
        }
        return objp;
    }

    /// <summary>
    /// Loads one image, detects and refines the chessboard corners, and on success
    /// appends the (object points, image points) pair to the calibration lists.
    /// Shows each accepted detection for 100 ms.
    /// </summary>
    static void ProcessImage(string imageFile, Size chessboardSize, TermCriteria criteria, List<Mat> objpoints, List<Mat> imgpoints, Mat objp)
    {
        Mat img = Cv2.ImRead(imageFile);
        if (img.Empty())
        {
            Console.WriteLine($"No se pudo cargar la imagen: {imageFile}");
            return;
        }
        Mat gray = new Mat();
        Cv2.CvtColor(img, gray, ColorConversionCodes.BGR2GRAY);
        if (Cv2.FindChessboardCorners(gray, chessboardSize, out Point2f[] corners))
        {
            // The same objp Mat is shared across views: it is read-only here.
            objpoints.Add(objp);
            // Refine corner locations to sub-pixel accuracy BEFORE storing them.
            Cv2.CornerSubPix(gray, corners, new Size(11, 11), new Size(-1, -1), criteria);
            Mat cornersMat = new Mat(corners.Length, 1, MatType.CV_32FC2);
            for (int i = 0; i < corners.Length; i++)
            {
                cornersMat.Set(i, corners[i]);
            }
            imgpoints.Add(cornersMat);
            Cv2.DrawChessboardCorners(img, chessboardSize, cornersMat, true);
            Cv2.ImShow("Image", img);
            Cv2.WaitKey(100); // brief pause so the overlay is visible
        }
        else
        {
            Console.WriteLine($"Esquinas del tablero no encontradas en la imagen: {imageFile}");
        }
    }

    /// <summary>
    /// Serializes the camera matrix followed by the distortion coefficients to a
    /// simple binary file (rows, cols, then row-major doubles for each matrix).
    /// </summary>
    static void SaveCalibrationData(Mat cameraMatrix, Mat distCoeffs, string filePath)
    {
        using (FileStream fs = new FileStream(filePath, FileMode.Create))
        using (BinaryWriter writer = new BinaryWriter(fs))
        {
            WriteMatrix(writer, cameraMatrix);
            WriteMatrix(writer, distCoeffs);
        }
    }

    /// <summary>
    /// Writes one CV_64F matrix as: rows (int32), cols (int32), then all elements
    /// row-major as doubles. Assumes <paramref name="mat"/> is single-channel double.
    /// </summary>
    static void WriteMatrix(BinaryWriter writer, Mat mat)
    {
        writer.Write(mat.Rows);
        writer.Write(mat.Cols);
        for (int i = 0; i < mat.Rows; i++)
        {
            for (int j = 0; j < mat.Cols; j++)
            {
                writer.Write(mat.At<double>(i, j));
            }
        }
    }

    /// <summary>
    /// Mean per-view reprojection error: for each view, project the 3D points with
    /// the solved pose and compare against the detected corners (L2 norm / point count),
    /// then average over all views. Mirrors the OpenCV Python tutorial metric.
    /// </summary>
    static double CalculateReprojectionError(List<Mat> objpoints, List<Mat> imgpoints, Mat[] rvecs, Mat[] tvecs, Mat cameraMatrix, Mat distCoeffs)
    {
        double totalError = 0;
        for (int i = 0; i < objpoints.Count; i++)
        {
            Mat imgPoints2 = new Mat();
            Cv2.ProjectPoints(objpoints[i], rvecs[i], tvecs[i], cameraMatrix, distCoeffs, imgPoints2);
            totalError += Cv2.Norm(imgpoints[i], imgPoints2, NormTypes.L2) / imgPoints2.Rows;
        }
        return totalError / objpoints.Count;
    }

    /// <summary>
    /// Undistorts one image two ways — Cv2.Undistort and InitUndistortRectifyMap +
    /// Remap — crops both to the valid region of interest, and writes them to disk.
    /// </summary>
    static void UndistortImage(Mat cameraMatrix, Mat distCoeffs, Size frameSize, string inputPath, string outputUndistortedPath, string outputRemappedPath)
    {
        Mat imgToUndistort = Cv2.ImRead(inputPath);
        if (imgToUndistort.Empty())
        {
            Console.WriteLine("No se pudo cargar la imagen para eliminar la distorsión.");
            return;
        }
        // alpha=1 keeps all source pixels; roi marks the all-valid sub-rectangle.
        Mat newCameraMatrix = Cv2.GetOptimalNewCameraMatrix(cameraMatrix, distCoeffs, frameSize, 1, frameSize, out Rect roi);
        Mat dst = new Mat();
        Cv2.Undistort(imgToUndistort, dst, cameraMatrix, distCoeffs, newCameraMatrix);
        Mat cropped = new Mat(dst, roi);
        Cv2.ImWrite(outputUndistortedPath, cropped);
        Mat map1 = new Mat(), map2 = new Mat();
        Cv2.InitUndistortRectifyMap(cameraMatrix, distCoeffs, new Mat(), newCameraMatrix, frameSize, MatType.CV_16SC2, map1, map2);
        Mat remapped = new Mat();
        Cv2.Remap(imgToUndistort, remapped, map1, map2, InterpolationFlags.Linear);
        cropped = new Mat(remapped, roi);
        Cv2.ImWrite(outputRemappedPath, cropped);
    }

    /// <summary>
    /// Distance from the camera center to <paramref name="objectPoint"/>, using the
    /// solved extrinsics: Xc = R·X + t, distance = ‖Xc‖. Result is in the same units
    /// as the object points (20 units per chessboard square in CreateObjectPoints).
    /// </summary>
    /// <remarks>
    /// BUGFIX: the original divided the focal length by the REPROJECTION ERROR
    /// (pixel distance between the measured corner and its reprojection), which is
    /// not an object size in pixels, so the "distance" it produced was meaningless.
    /// The imagePoint parameter is kept for interface compatibility but is no
    /// longer needed by the corrected computation.
    /// </remarks>
    static double CalculateDistance(Point3f objectPoint, Point2f imagePoint, Mat cameraMatrix, Mat distCoeffs, Mat rvec, Mat tvec)
    {
        // Convert the rotation vector to a 3x3 rotation matrix.
        Mat rotation = new Mat();
        Cv2.Rodrigues(rvec, rotation);
        double x = objectPoint.X, y = objectPoint.Y, z = objectPoint.Z;
        // Camera-frame coordinates: Xc = R·X + t (rvec/tvec from CalibrateCamera are CV_64F).
        double xc = rotation.At<double>(0, 0) * x + rotation.At<double>(0, 1) * y + rotation.At<double>(0, 2) * z + tvec.At<double>(0, 0);
        double yc = rotation.At<double>(1, 0) * x + rotation.At<double>(1, 1) * y + rotation.At<double>(1, 2) * z + tvec.At<double>(1, 0);
        double zc = rotation.At<double>(2, 0) * x + rotation.At<double>(2, 1) * y + rotation.At<double>(2, 2) * z + tvec.At<double>(2, 0);
        return Math.Sqrt(xc * xc + yc * yc + zc * zc);
    }

    /// <summary>
    /// Computes and prints the camera-to-board distance for the first detected
    /// corner of the first calibrated view.
    /// </summary>
    static void CalculateAndPrintDistance(List<Mat> objpoints, List<Mat> imgpoints, Mat objp, Mat cameraMatrix, Mat distCoeffs, Mat[] rvecs, Mat[] tvecs)
    {
        if (objpoints.Count > 0 && imgpoints.Count > 0)
        {
            // BUGFIX: objp is an N×1 CV_32FC3 Mat, so At<float>(0, 1) read a
            // nonexistent column; read the full 3-channel element instead.
            Point3f point3D = objp.At<Point3f>(0);
            Point2f point2D = imgpoints[0].At<Point2f>(0, 0);
            double distance = CalculateDistance(point3D, point2D, cameraMatrix, distCoeffs, rvecs[0], tvecs[0]);
            // NOTE(review): the units match the square size in CreateObjectPoints
            // (20 per square) — "cm" is only correct if each square is 20 cm wide.
            Console.WriteLine($"Distancia al punto ({point2D.X}, {point2D.Y}): {distance} cm");
        }
    }

    /// <summary>Prints a CV_64F matrix row by row, preceded by a description line.</summary>
    static void PrintMatrix(string description, Mat mat)
    {
        Console.WriteLine(description);
        for (int i = 0; i < mat.Rows; i++)
        {
            for (int j = 0; j < mat.Cols; j++)
            {
                Console.Write($"{mat.At<double>(i, j)} ");
            }
            Console.WriteLine();
        }
    }
}
This is a single, self-contained program. The problem shows up when I inspect the results while debugging: the data it produces is inaccurate, even though the images are plotted internally and the corner points are obtained from them, so I cannot get precise calibration values. What would you recommend in this case?
Any recommendation or correction would be greatly appreciated, since this is a project I want to finish as soon as possible — and if you have already worked with this library (OpenCV and OpenCvSharp4), any help is welcome. Thank you very much for your time.
Digital Boost is a new contributor to this site. Take care in asking for clarification, commenting, and answering.
Check out our Code of Conduct.