/***********************************************************************
CalibrateProjector - Utility to calculate the calibration transformation
of a projector into a Kinect-captured 3D space.
Copyright (c) 2012-2018 Oliver Kreylos

This file is part of the Augmented Reality Sandbox (SARndbox).

The Augmented Reality Sandbox is free software; you can redistribute it
and/or modify it under the terms of the GNU General Public License as
published by the Free Software Foundation; either version 2 of the
License, or (at your option) any later version.

The Augmented Reality Sandbox is distributed in the hope that it will be
useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License along
with the Augmented Reality Sandbox; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
***********************************************************************/

#include "CalibrateProjector.h"

#include <stdlib.h>
#include <string.h>
#include <string>
#include <stdexcept>
#include <iostream>
#include <iomanip>
#include <Misc/FunctionCalls.h>
#include <IO/ValueSource.h>
#include <IO/CSVSource.h>
#include <IO/File.h>
#include <IO/OpenFile.h>
#include <Cluster/OpenPipe.h>
#include <Math/Math.h>
#include <Math/Constants.h>
#include <Math/Interval.h>
#include <Geometry/GeometryValueCoders.h>
#include <GL/gl.h>
#include <GL/GLGeometryWrappers.h>
#include <GL/GLTransformationWrappers.h>
#include <Vrui/Vrui.h>
#include <Vrui/VRScreen.h>
#include <Vrui/ToolManager.h>
#include <Vrui/DisplayState.h>
#include <Vrui/OpenFile.h>
#include <Kinect/DirectFrameSource.h>
#include <Kinect/OpenDirectFrameSource.h>
#include <Kinect/Camera.h>
#include <Kinect/MultiplexedFrameSource.h>

#include "Config.h"

/********************************************************
Static elements of class CalibrateProjector::CaptureTool:
********************************************************/

CalibrateProjector::CaptureToolFactory* CalibrateProjector::CaptureTool::factory = 0;

/************************************************
Methods of class CalibrateProjector::CaptureTool:
************************************************/

CalibrateProjector::CaptureTool::CaptureTool(const Vrui::ToolFactory* factory,
                                             const Vrui::ToolInputAssignment& inputAssignment)
    : Vrui::Tool(factory, inputAssignment) {
}

CalibrateProjector::CaptureTool::~CaptureTool(void) {
}

const Vrui::ToolFactory* CalibrateProjector::CaptureTool::getFactory(void) const {
    return factory;
}

void CalibrateProjector::CaptureTool::buttonCallback(int buttonSlotIndex,
                                                     Vrui::InputDevice::ButtonCallbackData* cbData) {
    /* Start capturing a depth frame if the button was just pressed: */
    if(cbData->newButtonState) {
        if(buttonSlotIndex == 0)
            application->startTiePointCapture();
        else
            application->startBackgroundCapture();
    }
}

/***********************************
Methods of class CalibrateProjector:
***********************************/

void CalibrateProjector::depthStreamingCallback(const Kinect::FrameBuffer& frameBuffer) {
    /* Forward depth frame to the disk extractor: */
    diskExtractor->submitFrame(frameBuffer);

    /* Forward depth frame to the projector: */
    projector->setDepthFrame(frameBuffer);

#if KINECT_CONFIG_USE_SHADERPROJECTOR
    /* Update application state: */
    Vrui::requestUpdate();
#endif
}

#if !KINECT_CONFIG_USE_SHADERPROJECTOR

void CalibrateProjector::meshStreamingCallback(const Kinect::MeshBuffer& meshBuffer) {
    /* Update application state: */
    Vrui::requestUpdate();
}

#endif

void CalibrateProjector::backgroundCaptureCompleteCallback(Kinect::DirectFrameSource&) {
    /* Reset the background capture flag: */
    std::cout << " done" << std::endl;
    capturingBackground = false;

    /* Enable background removal: */
    dynamic_cast<Kinect::DirectFrameSource*>(camera)->setRemoveBackground(true);

    /* Wake up the foreground thread: */
    Vrui::requestUpdate();
}

void CalibrateProjector::diskExtractionCallback(const Kinect::DiskExtractor::DiskList& disks) {
    /* Store the new disk list in the triple buffer: */
    Kinect::DiskExtractor::DiskList& newList = diskList.startNewValue();
    newList = disks;
    diskList.postNewValue();

    /* Wake up the main thread: */
    Vrui::requestUpdate();
}

CalibrateProjector::CalibrateProjector(int& argc, char**& argv)
    : Vrui::Application(argc, argv),
      numTiePointFrames(60), numBackgroundFrames(120),
      camera(0), diskExtractor(0), projector(0),
      capturingBackground(false), capturingTiePoint(false), numCaptureFrames(0),
      tiePointIndex(0),
      haveProjection(false), projection(4, 4) {
    /* Register the custom tool class: */
    CaptureToolFactory* toolFactory1 = new CaptureToolFactory("CaptureTool", "Capture", 0, *Vrui::getToolManager());
    toolFactory1->setNumButtons(2);
    toolFactory1->setButtonFunction(0, "Capture Tie Point");
    toolFactory1->setButtonFunction(1, "Capture Background");
    Vrui::getToolManager()->addClass(toolFactory1, Vrui::ToolManager::defaultToolFactoryDestructor);

    /* Process command line parameters: */
    bool printHelp = false;
    std::string sandboxLayoutFileName = CONFIG_CONFIGDIR;
    sandboxLayoutFileName.push_back('/');
    sandboxLayoutFileName.append(CONFIG_DEFAULTBOXLAYOUTFILENAME);
    projectionMatrixFileName = CONFIG_CONFIGDIR;
    projectionMatrixFileName.push_back('/');
    projectionMatrixFileName.append(CONFIG_DEFAULTPROJECTIONMATRIXFILENAME);
    Kinect::MultiplexedFrameSource* remoteSource = 0;
    int cameraIndex = 0;
    imageSize[0] = 1024;
    imageSize[1] = 768;
    numTiePoints[0] = 4;
    numTiePoints[1] = 3;
    int blobMergeDepth = 2;
    const char* tiePointFileName = 0;
    for(int i = 1; i < argc; ++i) {
        if(argv[i][0] == '-') {
            if(strcasecmp(argv[i] + 1, "h") == 0)
                printHelp = true;
            else if(strcasecmp(argv[i] + 1, "slf") == 0) {
                ++i;
                if(i < argc)
                    sandboxLayoutFileName = argv[i];
            } else if(strcasecmp(argv[i] + 1, "r") == 0) {
                i += 2;
                if(i < argc) {
                    /* Open a connection to a remote Kinect server: */
                    remoteSource = Kinect::MultiplexedFrameSource::create(Cluster::openTCPPipe(Vrui::getClusterMultiplexer(), argv[i - 1], atoi(argv[i])));
                }
            } else if(strcasecmp(argv[i] + 1, "c") == 0) {
                ++i;
                if(i < argc)
                    cameraIndex = atoi(argv[i]);
            } else if(strcasecmp(argv[i] + 1, "s") == 0) {
                if(i + 2 < argc) {
                    for(int j = 0; j < 2; ++j) {
                        ++i;
                        imageSize[j] = atoi(argv[i]);
                    }
                }
            } else if(strcasecmp(argv[i] + 1, "tp") == 0) {
                if(i + 2 < argc) {
                    for(int j = 0; j < 2; ++j) {
                        ++i;
                        numTiePoints[j] = atoi(argv[i]);
                    }
                }
            } else if(strcasecmp(argv[i] + 1, "bmd") == 0) {
                ++i;
                if(i < argc)
                    blobMergeDepth = atoi(argv[i]);
            } else if(strcasecmp(argv[i] + 1, "tpf") == 0) {
                ++i;
                if(i < argc)
                    tiePointFileName = argv[i];
            } else if(strcasecmp(argv[i] + 1, "pmf") == 0) {
                ++i;
                if(i < argc)
                    projectionMatrixFileName = argv[i];
            }
        }
    }

    if(printHelp) {
        std::cout << "Usage: CalibrateProjector [option 1] ... [option n]" << std::endl;
        std::cout << " Options:" << std::endl;
        std::cout << " -h" << std::endl;
        std::cout << " Prints this help message" << std::endl;
        std::cout << " -slf <sandbox layout file name>" << std::endl;
        std::cout << " Loads the sandbox layout file of the given name" << std::endl;
        std::cout << " Default: " << CONFIG_CONFIGDIR << '/' << CONFIG_DEFAULTBOXLAYOUTFILENAME << std::endl;
        std::cout << " -r <server host name> <server port number>" << std::endl;
        std::cout << " Connects to a remote 3D video server on the given host name /" << std::endl;
        std::cout << " port number" << std::endl;
        std::cout << " Default: <empty>" << std::endl;
        std::cout << " -c <camera index>" << std::endl;
        std::cout << " Selects the 3D camera of the given index on the local USB bus or" << std::endl;
        std::cout << " on the remote 3D video server (0: first camera)" << std::endl;
        std::cout << " Default: 0" << std::endl;
        std::cout << " -s <projector image width> <projector image height>" << std::endl;
        std::cout << " Sets the width and height of the projector image in pixels. This" << std::endl;
        std::cout << " must match the actual resolution of the projector." << std::endl;
        std::cout << " Default: 1024 768" << std::endl;
        std::cout << " -tp <grid width> <grid height>" << std::endl;
        std::cout << " Sets the number of tie points to be collected before a calibration" << std::endl;
        std::cout << " is computed." << std::endl;
        std::cout << " Default: 4 3" << std::endl;
        std::cout << " -bmd <maximum blob merge depth distance>" << std::endl;
        std::cout << " Maximum depth distance between adjacent pixels in the same blob." << std::endl;
        std::cout << " Default: 1" << std::endl;
        std::cout << " -tpf <tie point file name>" << std::endl;
        std::cout << " Reads initial calibration tie points from a CSV file" << std::endl;
        std::cout << " -pmf <projection matrix file name>" << std::endl;
        std::cout << " Saves the calibration matrix to the file of the given name" << std::endl;
        std::cout << " Default: " << CONFIG_CONFIGDIR << '/' << CONFIG_DEFAULTPROJECTIONMATRIXFILENAME << std::endl;
    }

    /* Read the sandbox layout file: */
    IO::ValueSource layoutSource(Vrui::openFile(sandboxLayoutFileName.c_str()));
    layoutSource.skipWs();
    std::string s = layoutSource.readLine();
    basePlane = Misc::ValueCoder<OPlane>::decode(s.c_str(), s.c_str() + s.length());
    basePlane.normalize();
    for(int i = 0; i < 4; ++i) {
        layoutSource.skipWs();
        s = layoutSource.readLine();
        basePlaneCorners[i] = basePlane.project(Misc::ValueCoder<OPoint>::decode(s.c_str(), s.c_str() + s.length()));
    }

    /* Calculate the transformation from camera space to sandbox space: */
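    /* (The sandbox frame is built purely from the box layout: z is the base plane normal, x averages
       the directions of two opposite box edges (corners 0->1 and 2->3) re-orthogonalized against z,
       and y completes a right-handed frame. Inverting that rotation and then translating the
       projected box center to the origin yields a transform that maps the sandbox floor into the
       z=0 plane, which display() uses for its top-down view.) */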
    ONTransform::Vector z = basePlane.getNormal();
    ONTransform::Vector x = (basePlaneCorners[1] - basePlaneCorners[0]) + (basePlaneCorners[3] - basePlaneCorners[2]);
    x.orthogonalize(z);
    ONTransform::Vector y = z ^ x;
    boxTransform = ONTransform::rotate(Geometry::invert(ONTransform::Rotation::fromBaseVectors(x, y)));
    ONTransform::Point center = Geometry::mid(Geometry::mid(basePlaneCorners[0], basePlaneCorners[1]), Geometry::mid(basePlaneCorners[2], basePlaneCorners[3]));
    boxTransform *= ONTransform::translateToOriginFrom(basePlane.project(center));

    /* Calculate a bounding box around the sandbox area: */
    bbox = Box::empty;
    for(int i = 0; i < 4; ++i)
        bbox.addPoint(boxTransform.transform(basePlaneCorners[i]));

    if(tiePointFileName != 0) {
        /* Read the tie point file: */
        IO::CSVSource tiePointFile(IO::openFile(tiePointFileName));
        while(!tiePointFile.eof()) {
            /* Read the tie point: */
            TiePoint tp;
            for(int i = 0; i < 2; ++i)
                tp.p[i] = tiePointFile.readField<double>();
            for(int i = 0; i < 3; ++i)
                tp.o[i] = tiePointFile.readField<double>();

            tiePoints.push_back(tp);
        }

        if(tiePoints.size() >= size_t(numTiePoints[0]*numTiePoints[1])) {
            /* Calculate an initial calibration: */
            calcCalibration();
        }
    }

    /* Open the requested 3D video source: */
    if(remoteSource != 0) {
        /* Open the camera of selected index on the remote server: */
        camera = remoteSource->getStream(cameraIndex);
    } else {
        /* Open the camera of selected index on the local USB bus: */
        Kinect::DirectFrameSource* directCamera = Kinect::openDirectFrameSource(cameraIndex);
        camera = directCamera;

        /* Set some camera type-specific parameters: */
        directCamera->setBackgroundRemovalFuzz(1);

        /* Check if the camera is a first-generation Kinect: */
        Kinect::Camera* kinectV1 = dynamic_cast<Kinect::Camera*>(directCamera);
        if(kinectV1 != 0) {
            /* Set Kinect v1-specific parameters: */
            kinectV1->setCompressDepthFrames(true);
            kinectV1->setSmoothDepthFrames(false);
        }
    }

    /* Create a disk extractor for the 3D video source: */
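    /* The disk extractor searches each incoming depth frame for a flat, roughly circular blob that
       matches the calibration target held above the sand. The hard-coded values below appear to be
       in the camera's native 3D units (centimeters for the Kinect package), which would make the
       expected target a disk of roughly 12cm diameter (6cm radius plus a 10% margin) covering at
       least 250 depth pixels; blobMergeDepth controls how large a depth step between neighboring
       pixels is still merged into the same blob. */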
    diskExtractor = new Kinect::DiskExtractor(camera->getActualFrameSize(Kinect::FrameSource::DEPTH), camera->getDepthCorrectionParameters(), camera->getIntrinsicParameters());
    diskExtractor->setMaxBlobMergeDist(blobMergeDepth);
    diskExtractor->setMinNumPixels(250);
    diskExtractor->setDiskRadius(6.0);
    diskExtractor->setDiskRadiusMargin(1.10);
    diskExtractor->setDiskFlatness(1.0);

    /* Create a projector for the 3D video source: */
    projector = new Kinect::ProjectorType(*camera);
    projector->setTriangleDepthRange(blobMergeDepth);

    /* Reset the projector's extrinsic parameters: */
    projector->setExtrinsicParameters(Kinect::FrameSource::ExtrinsicParameters::identity);

#if KINECT_CONFIG_USE_PROJECTOR2

    /* Disable color mapping and illumination on the projector: */
    projector->setMapTexture(false);
    projector->setIlluminate(false);

#endif

    /* Start streaming from the 3D video source and extracting disks: */
    diskExtractor->startStreaming(Misc::createFunctionCall(this, &CalibrateProjector::diskExtractionCallback));
#if !KINECT_CONFIG_USE_SHADERPROJECTOR
    projector->startStreaming(Misc::createFunctionCall(this, &CalibrateProjector::meshStreamingCallback));
#endif
    camera->startStreaming(Misc::createFunctionCall(projector, &Kinect::ProjectorType::setColorFrame), Misc::createFunctionCall(this, &CalibrateProjector::depthStreamingCallback));

    /* Start capturing the initial background frame: */
    startBackgroundCapture();
}

CalibrateProjector::~CalibrateProjector(void) {
    /* Stop streaming from the 3D video source: */
    camera->stopStreaming();
    diskExtractor->stopStreaming();

    /* Clean up: */
    delete diskExtractor;
    delete projector;
    delete camera;
}

void CalibrateProjector::frame(void) {
    /* Check if we are capturing a tie point and there is a new list of extracted disks: */
    if(diskList.lockNewValue() && capturingTiePoint && diskList.getLockedValue().size() == 1) {
        /* Access the only extracted disk: */
        const Kinect::DiskExtractor::Disk& disk = diskList.getLockedValue().front();

        /* Check if there is a real disk center position: */
        bool diskValid = true;
        for(int i = 0; i < 3; ++i)
            diskValid = diskValid && Math::isFinite(disk.center[i]);

#if 0

        /* Check if the disk is inside the sandbox area: */
        diskValid = diskValid && (basePlane.getNormal() ^ (basePlaneCorners[1] - basePlaneCorners[0])) * (disk.center - basePlaneCorners[0]) >= 0.0;
        diskValid = diskValid && (basePlane.getNormal() ^ (basePlaneCorners[3] - basePlaneCorners[1])) * (disk.center - basePlaneCorners[1]) >= 0.0;
        diskValid = diskValid && (basePlane.getNormal() ^ (basePlaneCorners[2] - basePlaneCorners[3])) * (disk.center - basePlaneCorners[3]) >= 0.0;
        diskValid = diskValid && (basePlane.getNormal() ^ (basePlaneCorners[0] - basePlaneCorners[2])) * (disk.center - basePlaneCorners[2]) >= 0.0;

#endif

        if(diskValid) {
            /* Store the just-captured tie point: */
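            /* A tie point pairs the current target cross-hair (one cell of the
               numTiePoints[0] x numTiePoints[1] grid spread evenly across the projector image, the
               same position display() draws) with the disk center measured in camera space; the
               half-pixel offset aims at the center of the target pixel. */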
            TiePoint tp;
            int xIndex = tiePointIndex % numTiePoints[0];
            int yIndex = (tiePointIndex / numTiePoints[0]) % numTiePoints[1];
            int x = (xIndex + 1) * imageSize[0] / (numTiePoints[0] + 1);
            int y = (yIndex + 1) * imageSize[1] / (numTiePoints[1] + 1);
            tp.p = PPoint(Scalar(x) + Scalar(0.5), Scalar(y) + Scalar(0.5));
            tp.o = disk.center;
            tiePoints.push_back(tp);

            /* Check if that's enough: */
            --numCaptureFrames;
            if(numCaptureFrames == 0) {
                /* Stop capturing this tie point and move to the next: */
                std::cout << "done" << std::endl;
                capturingTiePoint = false;
                ++tiePointIndex;

                /* Check if the calibration is complete: */
                if(tiePointIndex >= numTiePoints[0]*numTiePoints[1]) {
                    /* Calculate the calibration transformation: */
                    calcCalibration();
                }
            }
        }
    }

    /* Update the projector: */
    projector->updateFrames();
}

void CalibrateProjector::display(GLContextData& contextData) const {
    /* Set up OpenGL state: */
    glPushAttrib(GL_ENABLE_BIT | GL_LINE_BIT);
    glDisable(GL_CULL_FACE);
    glDisable(GL_DEPTH_TEST);
    glDisable(GL_LIGHTING);
    glLineWidth(1.0f);

    if(capturingBackground) {
        /* Go to screen space: */
        glPushMatrix();
        glLoadIdentity();
        glMatrixMode(GL_PROJECTION);
        glPushMatrix();
        glLoadIdentity();
        glOrtho(0.0, double(imageSize[0]), 0.0, double(imageSize[1]), -1.0, 1.0);

        /* Indicate that a background frame is being captured: */
        glBegin(GL_QUADS);
        glColor3f(1.0f, 0.0f, 0.0f);
        glVertex2f(0.0f, 0.0f);
        glVertex2f(float(imageSize[0]), 0.0f);
        glVertex2f(float(imageSize[0]), float(imageSize[1]));
        glVertex2f(0.0f, float(imageSize[1]));
        glEnd();

        /* Return to navigational space: */
        glPopMatrix();
        glMatrixMode(GL_MODELVIEW);
        glPopMatrix();
    } else {
        /* Set up an orthographic projection showing the sandbox area from above: */
        glMatrixMode(GL_PROJECTION);
        glPushMatrix();
        glLoadIdentity();

        /* Match the sandbox area's aspect ratio against the display screen: */
        Scalar bbw = bbox.getSize(0);
        Scalar bbh = bbox.getSize(1);
        const Vrui::VRScreen* screen = Vrui::getDisplayState(contextData).screen;
        Scalar sw = screen->getWidth();
        Scalar sh = screen->getHeight();
        if(bbw * sh >= sw * bbh) { // Sandbox area is wider; pad vertically
            Scalar filler = Math::div2((bbw * sh) / sw - bbh);
            glOrtho(bbox.min[0], bbox.max[0], bbox.min[1] - filler, bbox.max[1] + filler, -200.0, 200.0);
        } else { // Sandbox area is taller; pad horizontally
            Scalar filler = Math::div2((bbh * sw) / sh - bbw);
            glOrtho(bbox.min[0] - filler, bbox.max[0] + filler, bbox.min[1], bbox.max[1], -200.0, 200.0);
        }

        /* Transform camera space to sandbox space: */
        glMatrixMode(GL_MODELVIEW);
        glPushMatrix();
        glLoadMatrix(boxTransform);

        /* Draw the sandbox outline: */
        glBegin(GL_LINE_LOOP);
        glColor3f(1.0f, 1.0f, 0.0f);
        glVertex(basePlaneCorners[0]);
        glVertex(basePlaneCorners[1]);
        glVertex(basePlaneCorners[3]);
        glVertex(basePlaneCorners[2]);
        glEnd();

        /* Draw the current 3D video facade: */
        glColor3f(1.0f, 1.0f, 0.0f);
        projector->glRenderAction(contextData);

        /* Draw all currently extracted disks: */
        const Kinect::DiskExtractor::DiskList& dl = diskList.getLockedValue();
        for(Kinect::DiskExtractor::DiskList::const_iterator dlIt = dl.begin(); dlIt != dl.end(); ++dlIt) {
            glPushMatrix();
            glTranslate(dlIt->center - Kinect::DiskExtractor::Point::origin);
            glRotate(Vrui::Rotation::rotateFromTo(Vrui::Vector(0, 0, 1), Vrui::Vector(dlIt->normal)));

            glBegin(GL_POLYGON);
            glColor3f(0.0f, 1.0f, 0.0f);
            for(int i = 0; i < 64; ++i) {
                Vrui::Scalar angle = Vrui::Scalar(i) * Vrui::Scalar(2) * Math::Constants<Vrui::Scalar>::pi / Vrui::Scalar(64);
                glVertex3d(Math::cos(angle)*dlIt->radius, Math::sin(angle)*dlIt->radius, 0.0);
            }
            glEnd();

            glPopMatrix();
        }

        /* Go to screen space: */
        glMatrixMode(GL_PROJECTION);
        glLoadIdentity();
        glOrtho(0.0, double(imageSize[0]), 0.0, double(imageSize[1]), -1.0, 1.0);
        glMatrixMode(GL_MODELVIEW);
        glLoadIdentity();

        /* Calculate the screen-space position of the next tie point: */
        int xIndex = tiePointIndex % numTiePoints[0];
        int yIndex = (tiePointIndex / numTiePoints[0]) % numTiePoints[1];
        int x = (xIndex + 1) * imageSize[0] / (numTiePoints[0] + 1);
        int y = (yIndex + 1) * imageSize[1] / (numTiePoints[1] + 1);

        /* Draw the next tie point: */
        glBegin(GL_LINES);
        glColor3f(1.0f, 1.0f, 1.0f);
        glVertex2f(0.0f, float(y) + 0.5f);
        glVertex2f(float(imageSize[0]), float(y) + 0.5f);
        glVertex2f(float(x) + 0.5f, 0.0f);
        glVertex2f(float(x) + 0.5f, float(imageSize[1]));
        glEnd();

        if(haveProjection) {
            /* Draw all currently extracted disks using the current calibration: */
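            /* Each disk center is pushed through the current 4x4 calibration matrix and the
               resulting clip coordinates are mapped from [-1, 1] back to projector pixels; a red
               cross-hair is drawn there, so with an accurate calibration the projected red lines
               should pass through the physical disk. */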
            for(Kinect::DiskExtractor::DiskList::const_iterator dlIt = dl.begin(); dlIt != dl.end(); ++dlIt) {
                Math::Matrix blob(4, 1);
                for(int i = 0; i < 3; ++i)
                    blob(i) = dlIt->center[i];
                blob(3) = 1.0;
                Math::Matrix projBlob = projection * blob;
                double x = (projBlob(0) / projBlob(3) + 1.0) * double(imageSize[0]) / 2.0;
                double y = (projBlob(1) / projBlob(3) + 1.0) * double(imageSize[1]) / 2.0;
                glBegin(GL_LINES);
                glColor3f(1.0f, 0.0f, 0.0f);
                glVertex2d(x, 0.0);
                glVertex2d(x, double(imageSize[1]));
                glVertex2d(0.0, y);
                glVertex2d(double(imageSize[0]), y);
                glEnd();
            }
        }

        /* Return to navigational space: */
        glMatrixMode(GL_PROJECTION);
        glPopMatrix();
        glMatrixMode(GL_MODELVIEW);
        glPopMatrix();
    }

    glPopAttrib();
}

void CalibrateProjector::startBackgroundCapture(void) {
    /* Bail out if already capturing a tie point or background: */
    if(capturingBackground || capturingTiePoint)
        return;

    /* Check if this is a directly-connected 3D camera: */
    Kinect::DirectFrameSource* directCamera = dynamic_cast<Kinect::DirectFrameSource*>(camera);
    if(directCamera != 0) {
        /* Tell the 3D camera to capture a new background frame: */
        capturingBackground = true;
        std::cout << "CalibrateProjector: Capturing " << numBackgroundFrames << " background frames..." << std::flush;
        directCamera->captureBackground(numBackgroundFrames, true, Misc::createFunctionCall(this, &CalibrateProjector::backgroundCaptureCompleteCallback));
    }
}

void CalibrateProjector::startTiePointCapture(void) {
    /* Bail out if already capturing a tie point or background: */
    if(capturingBackground || capturingTiePoint)
        return;

    /* Start capturing a new tie point: */
    capturingTiePoint = true;
    numCaptureFrames = numTiePointFrames;
    std::cout << "CalibrateProjector: Capturing " << numTiePointFrames << " tie point frames..." << std::flush;
}

void CalibrateProjector::calcCalibration(void) {
    /* Create the least-squares system: */
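    /* This is a standard direct linear transformation (DLT) fit: each tie point (projector pixel p,
       camera-space point o) yields two equations that are linear in the twelve entries of the 3x4
       projection, obtained by cross-multiplying p[0] = (h0.o)/(h2.o) and p[1] = (h1.o)/(h2.o) with
       o in homogeneous form. The equations are accumulated into the 12x12 normal matrix
       a = sum(e^T * e); the eigenvector belonging to its smallest eigenvalue is the least-squares
       solution under the constraint |h| = 1. */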
    Math::Matrix a(12, 12, 0.0);

    /* Process all tie points: */
    for(std::vector<TiePoint>::iterator tpIt = tiePoints.begin(); tpIt != tiePoints.end(); ++tpIt) {
        // DEBUGGING
        // std::cout<<"Tie point: "<<tpIt->p[0]<<", "<<tpIt->p[1]<<", "<<tpIt->o[0]<<", "<<tpIt->o[1]<<", "<<tpIt->o[2]<<std::endl;

        /* Create the tie point's associated two linear equations: */
        double eq[2][12];
        eq[0][0] = tpIt->o[0];
        eq[0][1] = tpIt->o[1];
        eq[0][2] = tpIt->o[2];
        eq[0][3] = 1.0;
        eq[0][4] = 0.0;
        eq[0][5] = 0.0;
        eq[0][6] = 0.0;
        eq[0][7] = 0.0;
        eq[0][8] = -tpIt->p[0] * tpIt->o[0];
        eq[0][9] = -tpIt->p[0] * tpIt->o[1];
        eq[0][10] = -tpIt->p[0] * tpIt->o[2];
        eq[0][11] = -tpIt->p[0];

        eq[1][0] = 0.0;
        eq[1][1] = 0.0;
        eq[1][2] = 0.0;
        eq[1][3] = 0.0;
        eq[1][4] = tpIt->o[0];
        eq[1][5] = tpIt->o[1];
        eq[1][6] = tpIt->o[2];
        eq[1][7] = 1.0;
        eq[1][8] = -tpIt->p[1] * tpIt->o[0];
        eq[1][9] = -tpIt->p[1] * tpIt->o[1];
        eq[1][10] = -tpIt->p[1] * tpIt->o[2];
        eq[1][11] = -tpIt->p[1];

        /* Insert the two equations into the least-squares system: */
        for(int row = 0; row < 2; ++row) {
            for(unsigned int i = 0; i < 12; ++i)
                for(unsigned int j = 0; j < 12; ++j)
                    a(i, j) += eq[row][i] * eq[row][j];
        }
    }

    /* Find the least-squares system's smallest eigenvalue: */
    std::pair<Math::Matrix, Math::Matrix> qe = a.jacobiIteration();
    unsigned int minEIndex = 0;
    double minE = Math::abs(qe.second(0, 0));
    for(unsigned int i = 1; i < 12; ++i) {
        if(minE > Math::abs(qe.second(i, 0))) {
            minEIndex = i;
            minE = Math::abs(qe.second(i, 0));
        }
    }

    /* Create the initial unscaled homography: */
    Math::Matrix hom(3, 4);
    for(int i = 0; i < 3; ++i)
        for(int j = 0; j < 4; ++j)
            hom(i, j) = qe.first(i * 4 + j, minEIndex);

    /* Scale the homography such that projected weights are positive distances from the projector: */
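    /* The third row of hom produces the homogeneous weight w of every object-space point. Dividing
       the entire matrix by the length of that row's linear part turns w into the signed distance of
       a point from the projector's principal plane; if all tie points came out with negative w, the
       sign is flipped instead, so a valid calibration has every tie point in front of the
       projector. */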
    double wLen = Math::sqrt(Math::sqr(hom(2, 0)) + Math::sqr(hom(2, 1)) + Math::sqr(hom(2, 2)));
    int numNegativeWeights = 0;
    for(std::vector<TiePoint>::iterator tpIt = tiePoints.begin(); tpIt != tiePoints.end(); ++tpIt) {
        /* Calculate the object-space tie point's projected weight: */
        double w = hom(2, 3);
        for(int j = 0; j < 3; ++j)
            w += hom(2, j) * tpIt->o[j];
        if(w < 0.0)
            ++numNegativeWeights;
    }

    if(numNegativeWeights == 0 || numNegativeWeights == int(tiePoints.size())) {
        /* Scale the homography: */
        if(numNegativeWeights > 0)
            wLen = -wLen;
        for(int i = 0; i < 3; ++i)
            for(int j = 0; j < 4; ++j)
                hom(i, j) /= wLen;

        /* Print the scaled homography: */
        for(int i = 0; i < 3; ++i) {
            std::cout << std::setw(10) << hom(i, 0);
            for(int j = 1; j < 4; ++j)
                std::cout << " " << std::setw(10) << hom(i, j);
            std::cout << std::endl;
        }

        /* Calculate the calibration residual: */
        double res = 0.0;
        for(std::vector<TiePoint>::iterator tpIt = tiePoints.begin(); tpIt != tiePoints.end(); ++tpIt) {
            Math::Matrix op(4, 1);
            for(int i = 0; i < 3; ++i)
                op(i) = tpIt->o[i];
            op(3) = 1.0;

            Math::Matrix pp = hom * op;
            for(int i = 0; i < 2; ++i)
                pp(i) /= pp(2);

            res += Math::sqr(pp(0) - tpIt->p[0]) + Math::sqr(pp(1) - tpIt->p[1]);
        }
        res = Math::sqrt(res / double(tiePoints.size()));
        std::cout << "RMS calibration residual: " << res << std::endl;

        /* Calculate the full projector projection matrix: */
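        /* The 3x4 homography only yields pixel x, y and a weight; to act as an OpenGL projection it
           is expanded to 4x4: rows 0 and 1 are copied, the weight row becomes row 3, and row 2 is
           set to (0, 0, 0, -1) as a placeholder depth row that the inverse-viewport multiplication
           below turns into a perspective-style depth based on the tie points' z range. */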
        for(unsigned int i = 0; i < 2; ++i)
            for(unsigned int j = 0; j < 4; ++j)
                projection(i, j) = hom(i, j);
        for(unsigned int j = 0; j < 3; ++j)
            projection(2, j) = 0.0;
        projection(2, 3) = -1.0;
        for(unsigned int j = 0; j < 4; ++j)
            projection(3, j) = hom(2, j);

        /* Calculate the z range of all tie points: */
        Math::Interval<double> zRange = Math::Interval<double>::empty;
        int numNegativeWeights = 0;
        for(std::vector<TiePoint>::iterator tpIt = tiePoints.begin(); tpIt != tiePoints.end(); ++tpIt) {
            /* Transform the object-space tie point with the projection matrix: */
            Math::Matrix op(4, 1);
            for(int i = 0; i < 3; ++i)
                op(i) = double(tpIt->o[i]);
            op(3) = 1.0;
            Math::Matrix pp = projection * op;
            if(pp(3) < 0.0)
                ++numNegativeWeights;
            zRange.addValue(pp(2) / pp(3));
        }
        std::cout << "Z range of collected tie points: [" << zRange.getMin() << ", " << zRange.getMax() << "]" << std::endl;

        /* Double the size of the z range to include a safety margin on either side: */
        zRange = Math::Interval<double>(zRange.getMin() * 2.0, zRange.getMax() * 0.5);

        /* Pre-multiply the projection matrix with the inverse viewport matrix to go to clip coordinates: */
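        /* The homography was fitted against raw projector pixel coordinates, but OpenGL expects
           clip coordinates; this matrix maps x from [0, imageSize[0]] and y from [0, imageSize[1]]
           to [-1, 1], and the padded tie-point z range to [-1, 1] as well. */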
        Math::Matrix invViewport(4, 4, 1.0);
        invViewport(0, 0) = 2.0 / double(imageSize[0]);
        invViewport(0, 3) = -1.0;
        invViewport(1, 1) = 2.0 / double(imageSize[1]);
        invViewport(1, 3) = -1.0;
        invViewport(2, 2) = 2.0 / (zRange.getSize());
        invViewport(2, 3) = -2.0 * zRange.getMin() / (zRange.getSize()) - 1.0;
        projection = invViewport * projection;

        /* Write the projection matrix to a file: */
        IO::FilePtr projFile = Vrui::openFile(projectionMatrixFileName.c_str(), IO::File::WriteOnly);
        projFile->setEndianness(Misc::LittleEndian);
        for(int i = 0; i < 4; ++i)
            for(int j = 0; j < 4; ++j)
                projFile->write<double>(projection(i, j));

        haveProjection = true;
    } else
        std::cout << "Calibration error: Some tie points have negative projection weights. Please start from scratch" << std::endl;
}

/* Create and execute an application object: */
VRUI_APPLICATION_RUN(CalibrateProjector)