% Main program code for M.Eng Project
%
% This program demonstrates the following
% 1. Estimation of Camera Projection matrices P1 and P2
% 2. Recovery of Fundamental matrix from P1 and P2
% 3. Reconstruction of the 3D grid points from P1, P2 and F
% 4. Rectification of the image pair
% 5. Verification of the disparity-depth relationship
%
% Copyright Zheshen Zhuang
% M.Eng 2005-06, Cornell University
%**************************************************************************
%Loading and displaying the images with the control points
%**************************************************************************
function main
% MAIN  M.Eng project demonstration of two-view geometry:
%   1. Estimation of the camera projection matrices P1 and P2
%   2. Recovery of the fundamental matrix F from P1 and P2
%   3. Reconstruction of the 3D grid points from P1, P2 and F
%   4. Rectification of the image pair
%   5. Verification of the disparity-depth relationship
%
% Requires on the path: PPM.m, reconstruct3D.m, rectify_im.m and the data
% files cobject.JPG, cobject2.JPG, camP1.mat, camP2.mat.
close all;
%**************************************************************************
% Loading and displaying the images with the control points
%**************************************************************************
im1 = rgb2gray(imread('cobject.JPG'));
im2 = rgb2gray(imread('cobject2.JPG'));
% Manually recorded control points (cpts, cpts2 -- 3xN homogeneous pixel
% coordinates picked with cpselect.m) and the known 3D grid points X
load camP1.mat
load camP2.mat
figure(1),
hold on
imagesc(im1);
colormap(gray);
plot( cpts(1,1), cpts(2,1), 'gx');
plot( cpts(1,2:end), cpts(2,2:end), 'rx');
title('View 1 of calibration object');
hold off
figure(2),
hold on
imagesc(im2);
colormap(gray);
plot( cpts2(1,1), cpts2(2,1), 'gx');
plot( cpts2(1,2:end), cpts2(2,2:end), 'rx');
title('View 2 of calibration object');
hold off
% The default pixel coordinate frame has its origin in the top left-hand
% corner with the y-axis pointing downwards.  Negating the y-values makes
% the axes right-handed so that P will give positive depth.
% BUG FIX: the original negated column 2 (every coordinate of the second
% point) instead of row 2 (the y-coordinate of every point).
cpts(2,:) = -cpts(2,:);
cpts2(2,:) = -cpts2(2,:);
% Linear least-squares estimation of the camera projection matrices.
% Details on the projection matrices are discussed in Section I and the
% estimation is described in Section III.
P1 = PPM(cpts,X);
P2 = PPM(cpts2,X);
% RQ decomposition of the camera projection matrices (Section I eqn(6))
[K1,R1,t1] = decomposeCamP(P1);
[K2,R2,t2] = decomposeCamP(P2);
% Optical centres c = -M^(-1)*p4 (Section I eqn(8)); backslash solves the
% linear system without forming an explicit inverse
c1 = -P1(1:3,1:3)\P1(:,4);
c2 = -P2(1:3,1:3)\P2(:,4);
% Fundamental matrix (Section II eqn(19)).  H maps P1 to the canonical
% form [I|0]; any non-zero value in the appended row's last entry works
% since F is normalised below.
H = inv( [P1 ; [ 0 0 0 2] ]);
P1p = P1*H;
P2p = P2*H;
F = skew(P2p(:,4))*P2p(1:3,1:3);
F = F ./ norm(F,1);
% Mean residual error |cpts2'*F*cpts| of F, displayed (no semicolon);
% should be close to zero for a good estimate
Fresidue = mean(abs(diag(cpts2'*F*cpts)))
% Triangulate the maximum-likelihood 3D positions of the correspondences
Xest = real(reconstruct3D(cpts,cpts2,F,P1,P2));
% Mean squared error between the true and reconstructed 3D points.
% BUG FIX: mean(norm(A,2)) is just the matrix 2-norm (already a scalar);
% the mean of the squared per-point errors is what is intended.
MSE = mean(sum((Xest - X).^2, 1))
% Display the true and reconstructed grid points together with the
% recovered camera centres (axes permuted so depth lies along the
% plot's first axis)
figure(3)
hold on
plot3(X(3,:),X(1,:),X(2,:), 'kx');
plot3(Xest(3,:),Xest(1,:),Xest(2,:), 'rx');
plot3(c1(3), c1(1), c1(2), 'go');
plot3(c2(3), c2(1), c2(2), 'co');
grid on
legend('True 3D pts','Reconstructed pts', 'camera 1','camera 2');
hold off
%--------------------------------------------------------------------------
% Compute the rectification matrices that rotate the two images so their
% epipolar lines line up and run horizontally across both images, as in a
% parallel-camera setup (see Section IV for a detailed explanation).
%--------------------------------------------------------------------------
% The new x-axis of both rectified cameras is parallel to the baseline
% joining the camera centres.  This is a hard requirement for the images
% to line up in the y-axis.
xNew = c1 - c2;
xNew = xNew / norm(xNew);
% Old z-axes (third rows of the rotation matrices)
zOld1 = R1(3,1:3)';
zOld2 = R2(3,1:3)';
% Components of the old z-axes perpendicular to the new x-axis; the new
% z-axis bisects them.  A near-zero component means the old optical axis
% was (almost) parallel to the baseline and contributes nothing.
k1 = zOld1 - dot(zOld1,xNew).*xNew;
if norm(k1) < 1e-4
    k1 = [0 0 0]';
else
    k1 = k1 / norm(k1);
end
k2 = zOld2 - dot(zOld2,xNew).*xNew;
if norm(k2) < 1e-4
    k2 = [0 0 0]';
else
    k2 = k2 / norm(k2);
end
zNew = k1 + k2;
zNew = zNew / norm(zNew);
% The new y-axis completes the right-handed frame
yNew = cross(zNew,xNew);
% New rotation matrix shared by both rectified cameras
Rnew = [xNew' ; yNew'; zNew'];
% Rectifying homographies T1 and T2 for camera 1 and 2 respectively
% (A/B is A*inv(B))
T1 = K1*Rnew/P1(1:3,1:3);
T2 = K2*Rnew/P2(1:3,1:3);
% Projection matrices of the rectified cameras (kept for reference;
% not used below)
Pnew1 = K1*Rnew*[eye(3) -c1];
Pnew2 = K2*Rnew*[eye(3) -c2];
%*************************************************************************
% Rectify the images using the computed rectification matrices T1 and T2
%**************************************************************************
% Map the corners of both images to find a common bounding box
nx = size(im1,2); ny = size(im1,1);
corners1 = inhomogeneous(T1*[1 1 1; nx 1 1; 1 ny 1; nx ny 1]');
nx = size(im2,2); ny = size(im2,1);
corners2 = inhomogeneous(T2*[1 1 1; nx 1 1; 1 ny 1; nx ny 1]');
corners = [corners1 corners2];
minx = floor(min(corners(1,:))-1);
miny = floor(min(corners(2,:))-1);
maxx = ceil(max(corners(1,:))+1);
maxy = ceil(max(corners(2,:))+1);
b = [minx miny maxx maxy];
% Warp the images with T1 and T2 to get the rectified images
newim1 = rectify_im(im1, T1, b);
newim2 = rectify_im(im2, T2, b);
newcpts1 = inhomogeneous(T1*cpts);
newcpts2 = inhomogeneous(T2*cpts2);
% Plot the rectified images and the epipolar lines.  Observe that the
% epipolar lines match across both images and are (almost) horizontal.
figure(4),
imagesc(minx:maxx, miny:maxy, newim1), axis xy, axis on, hold on
line([minx; maxx], [newcpts1(2,:); newcpts2(2,:)]);
plot(newcpts1(1,:), newcpts1(2,:), 'g*')
axis equal
title('First rectified image');
colormap gray;
figure(5),
imagesc(minx:maxx, miny:maxy, newim2), axis xy, axis on, hold on
% BUG FIX: the original drew newcpts2 against itself; use the same
% endpoints as figure 4 so both figures show matching epipolar lines.
line([minx; maxx], [newcpts1(2,:); newcpts2(2,:)]);
plot(newcpts2(1,:), newcpts2(2,:), 'g*')
axis equal
title('Second rectified image');
colormap gray;
%**************************************************************************
% Verify equation (35): disparity = |c2-c1|/depth, from Section V
%**************************************************************************
npts = size(X,2);
% True depth of every grid point along the rectified optical axis zNew
truedepth = zNew' * (X(1:3,:) - repmat(c1, 1, npts));
% Undo the calibration matrices of the rectified cameras.
% BUG FIX: the original applied inv(K1) to both views; view 2 must be
% corrected with its own calibration matrix K2.
newcpts1c = K1 \ [newcpts1 ; ones(1,npts)];
newcpts2c = K2 \ [newcpts2 ; ones(1,npts)];
% Disparity is the difference of the (normalised) x-coordinates
d = newcpts2c(1,:)-newcpts1c(1,:);
figure(6)
plot(norm(c2-c1,2)./truedepth, d, 'rx');
title('Verified Relationship between disparity and depth(zoom in)')
xlabel('|c_2-c_1|/depth');
ylabel('disparity');
figure(7)
plot(norm(c2-c1,2)./truedepth, d, 'rx');
axis([0,0.5,0,0.5]);
title('Verified Relationship between disparity and depth')
xlabel('|c_2-c_1|/depth');
ylabel('disparity');
% ********END OF MAIN PROGRAM*******
%**************************************************************************
% SHORT FUNCTIONS USED IN MAIN PROGRAM
%**************************************************************************
% RQ decomposition of the camera projection matrix P (Section I eqn(6)):
% P(1:3,1:3) = K*R with K upper-triangular (calibration matrix, K(3,3)=1)
% and R orthonormal.  Implemented via QR of the inverse: if inv(M) = U*B
% with U orthogonal and B upper-triangular, then M = inv(B)*U', so
% K = inv(B) and R = U'.  t = inv(K)*P(:,4) is the translation in
% P = K*[R t] (in the scale of the un-normalised K).
function [K,R,t] = decomposeCamP(P)
Q = inv(P(1:3,1:3));
[U,B] = qr(Q);
R = inv(U);
t = B*P(1:3,4);
K = inv(B);
% BUG FIX: qr fixes the factors only up to the signs of B's diagonal, so K
% could come back with negative focal lengths and R with compensating row
% sign flips.  Force K's diagonal positive and absorb the signs into R and
% t; since D*D = eye(3), the products K*R and K*t are unchanged.
D = diag(sign(diag(K)));
K = K*D;
R = D*R;
t = D*t;
K = K./K(3,3);
% Build the 3x3 skew-symmetric (cross-product) matrix of a 3-vector S,
% so that skew(S)*v equals cross(S,v) for any 3-vector v.
function S_hat = skew(S)
S_hat = [    0   -S(3)   S(2) ;
           S(3)     0   -S(1) ;
          -S(2)   S(1)     0  ];
% Convert Y (3xN homogeneous 2D points, one point per column) to X (2xN
% inhomogeneous points) by dividing the first two rows by the third.
function X = inhomogeneous(Y)
X = Y(1:2,:) ./ repmat(Y(3,:), 2, 1);
没有合适的资源?快使用搜索试试~ 我知道了~
3D图象的MATLAB仿真
共11个文件
m:7个
jpg:2个
mat:2个
3星 · 超过75%的资源 需积分: 9 27 下载量 118 浏览量
2010-01-16
11:43:31
上传
评论 2
收藏 545KB RAR 举报
温馨提示
这个是国外一个大牛编写的3D图象的MATLAB 程序,适合3D图象处理的信息专业的同志们享用,我没有使用过,不知道会不会出现问题,
资源推荐
资源详情
资源评论
收起资源包目录
ProjectCode.rar (11个子文件)
cobject2.JPG 141KB
camP2.mat 534B
reconstruct3D.m 3KB
inhomogeneous.m 185B
normalise2dpts.m 2KB
cobject.JPG 126KB
main.m 8KB
camP1.mat 599B
rectify_im.m 3KB
bilinear_interpolate.m 4KB
normalise3dpts.m 2KB
共 11 条
- 1
资源评论
- mengyuekelvin2013-02-25感谢楼主,不过太复杂了。。。
orangeyellow200
- 粉丝: 0
- 资源: 6
上传资源 快速赚钱
- 我的内容管理 展开
- 我的资源 快来上传第一个资源
- 我的收益 登录查看自己的收益
- 我的积分 登录查看自己的积分
- 我的C币 登录后查看C币余额
- 我的收藏
- 我的下载
- 下载帮助
安全验证
文档复制为VIP权益,开通VIP直接复制
信息提交成功