Stitching two images in OpenCV C++


I want to make a mosaic image by stitching several images together. I am doing this with SIFT features.
When I stitch the following two images:

img1

img2

I get this result. As you can see, the images are not completely aligned and there is a gap between them. Can you tell me what the problem is and how I can solve it?

Here is the function I use for stitching:

Mat StitchVertical(Mat& imGrayImg1, Mat& imGrayImg2)
{
Mat GrayImg1(imGrayImg1, Rect(0, 0, imGrayImg1.cols, imGrayImg1.rows));  // init roi 
Mat GrayImg2(imGrayImg2, Rect(0, 0, imGrayImg2.cols, imGrayImg2.rows));

cvtColor(GrayImg1, GrayImg1, COLOR_BGR2GRAY);
cvtColor(GrayImg2, GrayImg2, COLOR_BGR2GRAY);

vector<cv::KeyPoint> keypointsImg1, keypointsImg2;
Mat descriptorImg1, descriptorImg2;        
BFMatcher matcher;

vector<cv::DMatch> matches, good_matches;
cv::Ptr<cv::SIFT> sift = cv::SIFT::create();
int i, dist = 80;

sift->detectAndCompute(GrayImg1, cv::Mat(), keypointsImg1, descriptorImg1); /* get keypoints of ROI image */
sift->detectAndCompute(GrayImg2, cv::Mat(), keypointsImg2, descriptorImg2); /* get keypoints of the image */
matcher.match(descriptorImg1, descriptorImg2, matches);  // Matching between descriptors 

double max_dist = 0; double min_dist = 5000;
for (int i = 0; i < descriptorImg1.rows; i++)
{
    double dist = matches[i].distance;
    if (dist < min_dist) min_dist = dist;
    if (dist > max_dist) max_dist = dist;
}
for (i = 0; i < descriptorImg1.rows; i++)
{
    if (matches[i].distance < 3 * min_dist)
    {
        good_matches.push_back(matches[i]);
    }
}

Mat img_matches;
// Draw match 
drawMatches(imGrayImg1, keypointsImg1, imGrayImg2, keypointsImg2,
    good_matches, img_matches, Scalar::all(-1),
    Scalar::all(-1), vector<char>(),
    DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS);
//imshow("matches", img_matches);

vector<Point2f> keypoints1, keypoints2;
for (i = 0; i < good_matches.size(); i++)
{
    keypoints1.push_back(keypointsImg2[good_matches[i].trainIdx].pt);
    keypoints2.push_back(keypointsImg1[good_matches[i].queryIdx].pt);
}
Mat H, H2;
H = findHomography(keypoints1, keypoints2, RANSAC);
H2 = findHomography(keypoints2, keypoints1, RANSAC);
Mat stitchedImage;
int mRows = imGrayImg2.cols;
if (imGrayImg1.cols > imGrayImg2.cols)
{
    mRows = imGrayImg1.cols;
}
int count = 0;
for (int i = 0; i < keypoints2.size(); i++)
{
    if (keypoints2[i].y >= imGrayImg2.rows / 3) { count++; }
}
int minx,miny;
if (count / float(keypoints2.size()) >= 0.5)  // the image to be stitched, imGrayImg2, goes on the right 
{
    cout << "imGrayImg1 should left" << endl;
    vector<Point2f>corners(4);
    vector<Point2f>corners2(4);
    corners[0] = Point(0, 0);
    corners[1] = Point(0, imGrayImg2.rows);
    corners[2] = Point(imGrayImg2.cols, imGrayImg2.rows);
    corners[3] = Point(imGrayImg2.cols, 0);

    stitchedImage = Mat::zeros( mRows, imGrayImg2.rows + imGrayImg1.rows, CV_8UC3);
    warpPerspective(imGrayImg2, stitchedImage, H, Size(mRows, imGrayImg1.cols + imGrayImg2.cols));
    perspectiveTransform(corners, corners2, H);
    Mat half(stitchedImage, Rect(0, 0, imGrayImg1.cols, imGrayImg1.rows));
    imGrayImg1.copyTo(half);
    minx = stitchedImage.size().width;
    miny = stitchedImage.size().height;
    if ((int(corners2[2].x) - 10) < stitchedImage.size().width) {
        minx = (int(corners2[2].x) - 10);
    }
    if ((int(corners2[2].y) - 10) < stitchedImage.size().height) {
        miny = (int(corners2[2].y) - 10);
    }
    Rect crop_region(0, 0, minx, miny);
    Mat cropped_image = stitchedImage(crop_region);
    return cropped_image;
}
else 
{
    cout << "imGrayImg2 should be up" << endl;
    stitchedImage = Mat::zeros(mRows, imGrayImg2.rows + imGrayImg1.rows, CV_8UC3);
    warpPerspective(imGrayImg1, stitchedImage, H2, Size(mRows, imGrayImg1.cols + imGrayImg2.cols));
    //imshow("temp", stitchedImage);

    vector<Point2f>corners(4);
    vector<Point2f>corners2(4);
    corners[0] = Point(0, 0);
    corners[1] = Point(0, imGrayImg1.rows);
    corners[2] = Point(imGrayImg1.cols, imGrayImg1.rows);
    corners[3] = Point(imGrayImg1.cols, 0);
    cout << "second if in up and down" << endl;

    perspectiveTransform(corners, corners2, H2);  // map the corner points through the homography 

    Mat half(stitchedImage, Rect(0, 0, imGrayImg2.cols, imGrayImg2.rows));
    imGrayImg2.copyTo(half);
    minx = stitchedImage.size().width;
    miny = stitchedImage.size().height;
    if ((int(corners2[2].x) - 10) < stitchedImage.size().width) {
        minx = (int(corners2[2].x) - 10);
    }
    if ((int(corners2[2].y) - 10) < stitchedImage.size().height) {
        miny = (int(corners2[2].y) - 10);
    }

    Rect crop_region(0, 0, minx, miny);
    Mat cropped_image = stitchedImage(crop_region);
    return cropped_image;
}
imwrite("result.bmp", stitchedImage);
return stitchedImage;

}
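
For comparison, below is a minimal, self-contained sketch of one common way to composite two images with a homography: the output canvas size and offset are derived from the transformed corners, and a translation is composed into the homography so the warped image is not clipped at the canvas border. The function and variable names (stitchWithOffset, refImg, movImg) are illustrative only, and the sketch assumes H maps movImg into refImg's coordinate frame and that both images are 8-bit BGR.

#include <opencv2/opencv.hpp>
#include <algorithm>
#include <vector>

// Illustrative sketch: warp movImg into refImg's frame on a canvas large enough
// to hold both images, then paste refImg at its offset.
cv::Mat stitchWithOffset(const cv::Mat& refImg, const cv::Mat& movImg, const cv::Mat& H)
{
    // Where do the corners of the moving image land after warping?
    std::vector<cv::Point2f> corners = {
        {0.f, 0.f},
        {(float)movImg.cols, 0.f},
        {(float)movImg.cols, (float)movImg.rows},
        {0.f, (float)movImg.rows}
    };
    std::vector<cv::Point2f> warped;
    cv::perspectiveTransform(corners, warped, H);

    // Bounding box of both images in the reference frame.
    float minX = 0.f, minY = 0.f;
    float maxX = (float)refImg.cols, maxY = (float)refImg.rows;
    for (const auto& p : warped) {
        minX = std::min(minX, p.x);  minY = std::min(minY, p.y);
        maxX = std::max(maxX, p.x);  maxY = std::max(maxY, p.y);
    }

    // Translation that shifts everything into positive coordinates, composed
    // into the homography so the warp is not clipped at x = 0 or y = 0.
    cv::Mat T = (cv::Mat_<double>(3, 3) << 1, 0, -minX, 0, 1, -minY, 0, 0, 1);
    cv::Size canvas(cvRound(maxX - minX), cvRound(maxY - minY));

    cv::Mat result;
    cv::warpPerspective(movImg, result, T * H, canvas);

    // Paste the reference image at its offset; a feathered blend over the
    // overlap would hide the seam better than a hard copy.
    refImg.copyTo(result(cv::Rect(cvRound(-minX), cvRound(-minY), refImg.cols, refImg.rows)));
    return result;
}

Composing the translation into the homography (T * H) keeps the warped content on the canvas rather than letting parts with negative coordinates fall outside it, which is a frequent source of misalignment and gaps when the canvas is sized by hand.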
