一切图像皆Mat
OpenCV中图像对象的创建与复制
Mat基本结构
Mat对象由头部(Header)和数据部分(Data Block)组成:头部存储图像的属性,包括大小(宽、高)、图像类型(字节类型、16位整型、32位整型、单精度浮点型、双精度浮点型等)、通道数量以及数据的获取途径;数据部分存储所有像素值(像素点)的集合
- 使用赋值方法,可以将源Mat对象赋值给另一个Mat对象,但两个对象的内存指针仍指向同一个Data Block数据块,数据的本质没有变
- 使用copyTo或者clone方法,会重新分配一块数据块给拷贝得到的对象使用,两个对象指向的是不同的Data Block数据块
demo01
.h
#pragma once
#include <opencv2/opencv.hpp>
using namespace cv;
class Quickopencv
{
public:
void mat_create_demo(Mat& image);
};
.cpp
#include "Quickopencv.h"
void Quickopencv::mat_create_demo(Mat& image)
{
Mat m1, m2;
m1 = image.clone();
image.copyTo(m2);
//创建空白对象
Mat m3 = Mat::zeros(Size(8,8),CV_8UC1);
//Mat m3 = Mat::zeros(Size(8,8),CV_8UC3);
std::cout << m3 << std::endl;
}
.cpp(完整版)
#include "Quickopencv.h"
void Quickopencv::mat_create_demo(Mat& image)
{
Mat m1, m2;
m1 = image.clone();
image.copyTo(m2);
//创建空白对象
Mat m3 = Mat::zeros(Size(512,512),CV_8UC3);
m3 = Scalar(255,0,0);
std::cout << "width:" << m3.cols << ";height:" << m3.rows \
<<";channels:" <<m3.channels()<< std::endl;
//std::cout << m3 << std::endl;
//Mat m4 = m3;
Mat m4;
m3.copyTo(m4);
m4 = Scalar(0,255,255);
Mat m5 = m3.clone();
m5 = Scalar(0, 0, 255);
imshow("图像3", m3);
imshow("图像4", m4);
imshow("图像5", m5);
}
//创建空白对象
Mat m3 = Mat::zeros(Size(8,8),CV_8UC1);
std::cout << m3 << std::endl;
创建空白对象的初始化方法有ones和zeros,像素点个数等于宽乘以高(即分辨率)
Mat::zeros()方法将每个像素点的像素值初始化为0
CV_8UC1:表示8位unsigned char单通道数据(Size(8,8)对应8*8=64个像素点)
CV_8UC3:表示8位unsigned char三通道数据,一个像素点有3个像素值
//创建空白对象
Mat m3 = Mat::zeros(Size(8,8),CV_8UC3);
std::cout << "width:" << m3.cols << ";height:" << m3.rows \
<<";channels:" <<m3.channels()<< std::endl;
std::cout << m3 << std::endl;
数据真正宽度为 宽*通道数 ->8 * 3=24
Mat::ones()方法只能将每个像素点的第一个通道的像素值初始化为1,其它仍为0
使用Scalar(param1,param2,param3)可为每个像素点的各通道赋值
//创建空白对象
Mat m3 = Mat::zeros(Size(8,8),CV_8UC3);
m3 = Scalar(127,127,127);
std::cout << "width:" << m3.cols << ";height:" << m3.rows \
<<";channels:" <<m3.channels()<< std::endl;
std::cout << m3 << std::endl;
//创建空白对象
Mat m3 = Mat::zeros(Size(512,512),CV_8UC3);
m3 = Scalar(127,127,127);
std::cout << "width:" << m3.cols << ";height:" << m3.rows \
<<";channels:" <<m3.channels()<< std::endl;
std::cout << m3 << std::endl;
imshow("创建图像",m3);
创建纯蓝色图像
Mat m3 = Mat::zeros(Size(512,512),CV_8UC3);
m3 = Scalar(255,0,0);
std::cout << "width:" << m3.cols << ";height:" << m3.rows \
<<";channels:" <<m3.channels()<< std::endl;
//std::cout << m3 << std::endl;
imshow("图像",m3);
使用赋值方法将m3的颜色变成黄色
Mat m3 = Mat::zeros(Size(512,512),CV_8UC3);
m3 = Scalar(255,0,0);
std::cout << "width:" << m3.cols << ";height:" << m3.rows \
<<";channels:" <<m3.channels()<< std::endl;
//std::cout << m3 << std::endl;
Mat m4 = m3;
m4 = Scalar(0,255,255);
imshow("图像", m3);
通过copyTo和clone方法,得到不同颜色
Mat m3 = Mat::zeros(Size(512,512),CV_8UC3);//尺寸512*512 像素为8位
m3 = Scalar(255,0,0);
std::cout << "width:" << m3.cols << ";height:" << m3.rows \
<<";channels:" <<m3.channels()<< std::endl;
//std::cout << m3 << std::endl;
//Mat m4 = m3;
Mat m4;
m3.copyTo(m4);
m4 = Scalar(0,255,255);
Mat m5 = m3.clone();
m5 = Scalar(0, 0, 255);
imshow("图像3", m3);
imshow("图像4", m4);
imshow("图像5", m5);
图像像素的读写操作
彩色图像为三通道,灰度图像为单通道
- 三通道通过image.at<Vec3b>(row, col)方法一次性获取三通道值
- Vec3b:3个8位uchar
- Vec3i:3个32位int
- Vec3f:3个32位浮点数
- Vec3b bgr = image.at<Vec3b>(row, col);
- Vec3b 定义的变量相当于一个数组
.h
#pragma once
#include <opencv2/opencv.hpp>
using namespace cv;
class QuickDemo
{
public:
void pixel_visit_demo(Mat &image);
};
.cpp
#include "QuickDemo.h"
void QuickDemo::pixel_visit_demo(Mat& image)
{
int w = image.cols;
int h = image.rows;
int dims = image.channels();
// for (int row = 0; row < h; row++)//色彩取反
// {
// for (int col=0;col < w; col++)
// {
// if (dims == 1)//灰度图像
// {
// int pv = image.at<uchar>(row, col);
// image.at<uchar>(row, col) = 255 - pv;
// }
// else if (dims == 3)//彩色图像
// {
// Vec3b bgr = image.at<Vec3b>(row, col);
// image.at<Vec3b>(row, col)[0] = 255 - bgr[0];
// image.at<Vec3b>(row, col)[1] = 255 - bgr[1];
// image.at<Vec3b>(row, col)[2] = 255 - bgr[2];
// }
// }
// }
for (int row = 0; row < h; row++)//色彩取反
{
uchar* current_row = image.ptr<uchar>(row);
for (int col = 0; col < w; col++)
{
if (dims == 1)//灰度图像
{
int pv = *current_row;
*current_row++ = 255 - pv;
}
if (dims == 3) //彩色图像
{
//先读出原值再写回,避免在同一表达式中同时读写指针(旧标准下为未定义行为)
for (int k = 0; k < 3; k++)
{
int pv = *current_row;
*current_row++ = 255 - pv;
}
}
}
}
imshow("像素读写演示",image);
}
main.cpp
#include <iostream>
#include <opencv2/opencv.hpp>
#include "QuickDemo.h"
using namespace std;
using namespace cv;
int main()
{
Mat src = imread("D:/img/dog_two.jpg");
if (src.empty())
{
cout << "could not load image...\n";
return -1;
}
imshow("输入窗口",src);
QuickDemo qd;
qd.pixel_visit_demo(src);
waitKey(0);
destroyAllWindows();
return 0;
}
图像像素的算术操作
.h
#pragma once
#include <opencv2/opencv.hpp>
using namespace cv;
class QuickDemo
{
public:
void operator_demo(Mat &img);
};
.cpp
#include "QuickDemo.h"
void QuickDemo::operator_demo(Mat& img)
{
/*方法一
Mat dst;
//dst = img + Scalar(50, 50, 50);
//imshow("加法操作",dst);
//dst = img - Scalar(50, 50, 50);
//imshow("减法操作", dst);
Mat m = Mat::zeros(img.size(),img.type());
m = Scalar(2, 2, 2);
multiply(img,m,dst);
imshow("乘法操作", dst);
*/
/*方法二
Mat dst = Mat::zeros(img.size(),img.type());
Mat m = Mat::zeros(img.size(),img.type());
m = Scalar(50,50,50);
int w=img.cols;
int h=img.rows;
int dims=img.channels();
for(int row=0;row<h;row++)
for(int col=0;col<w;col++)
{
Vec3b p1=img.at<Vec3b>(row,col);
Vec3b p2=m.at<Vec3b>(row,col);
dst.at<Vec3b>(row,col)[0]=saturate_cast<uchar>(p1[0]+p2[0]);
dst.at<Vec3b>(row,col)[1]=saturate_cast<uchar>(p1[1]+p2[1]);
dst.at<Vec3b>(row,col)[2]=saturate_cast<uchar>(p1[2]+p2[2]);
}
imshow("加法操作", dst);
*/
//方法三
Mat dst = Mat::zeros(img.size(), img.type());
Mat m = Mat::zeros(img.size(), img.type());
m = Scalar(50, 50, 50);
//add(img,m,dst);
subtract(img,m,dst);
//divide(img,m,dst);
//multiply(img,m,dst);
imshow("减法操作",dst);
}
main.cpp
#include <iostream>
#include "QuickDemo.h"
using namespace std;
using namespace cv;
int main()
{
Mat src = imread("D:/img/girlExample.jpg");
if (src.empty())
{
cout << "load image failed...\n";
return -1;
}
imshow("初始图像",src);
QuickDemo qd;
qd.operator_demo(src);
waitKey(0);
destroyAllWindows();
return 0;
}
方法一(通道图+标量)
通过imread默认读取三通道图像,使用加法(三通道图 + 标量)提升亮度
dst = img + Scalar(50, 50, 50);
imshow("加法操作",dst);
使用减法(三通道图 - 标量)降低亮度
dst = img - Scalar(50, 50, 50);
imshow("减法操作", dst);
使用除法(三通道图 / 2),亮度降为原来的一半
dst = img / Scalar(2, 2, 2);
imshow("除法操作", dst);
乘法操作,不能直接乘,必须通过创建图像然后通过multiply()函数得到运算结果
void QuickDemo::operator_demo(Mat& img)
{
Mat dst;
//dst = img + Scalar(50, 50, 50);
//imshow("加法操作",dst);
//dst = img - Scalar(50, 50, 50);
//imshow("减法操作", dst);
Mat m = Mat::zeros(img.size(),img.type());
m = Scalar(2, 2, 2);
multiply(img,m,dst);//第一个和第二个参数为源图像,第三个为目标图像
imshow("乘法操作", dst);
}
方法二(手动操作两张图像具体像素点运算)
以加法为例,将像素点像素值提高50
saturate_cast<uchar>()会将计算结果截断到0~255范围内:下限为0,上限为255(防止溢出)
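下面是一个独立的最小示意(数值为假设),单独演示saturate_cast的截断效果:
#include <opencv2/opencv.hpp>
#include <iostream>
using namespace cv;
int main()
{
	int a = 200, b = 100;
	uchar wrapped = (uchar)(a + b);//直接强转会回绕:300 % 256 = 44
	uchar clamped = saturate_cast<uchar>(a + b);//截断到上限:255
	uchar negative = saturate_cast<uchar>(50 - 100);//截断到下限:0
	std::cout << (int)wrapped << " " << (int)clamped << " " << (int)negative << std::endl;
	return 0;
}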
Mat dst = Mat::zeros(img.size(),img.type());
Mat m = Mat::zeros(img.size(),img.type());
m = Scalar(50,50,50);
int w=img.cols;
int h=img.rows;
int dims=img.channels();
for(int row=0;row<h;row++)
for(int col=0;col<w;col++)
{
Vec3b p1=img.at<Vec3b>(row,col);
Vec3b p2=m.at<Vec3b>(row,col);
dst.at<Vec3b>(row,col)[0]=saturate_cast<uchar>(p1[0]+p2[0]);
dst.at<Vec3b>(row,col)[1]=saturate_cast<uchar>(p1[1]+p2[1]);
dst.at<Vec3b>(row,col)[2]=saturate_cast<uchar>(p1[2]+p2[2]);
}
imshow("加法操作", dst);
方法三(调用OpenCV提供的API,操作两个图像运算)
Mat dst = Mat::zeros(img.size(), img.type());
Mat m = Mat::zeros(img.size(), img.type());
m = Scalar(50, 50, 50);
//add(img,m,dst);
subtract(img,m,dst);
//divide(img,m,dst);
//multiply(img,m,dst);
imshow("减法操作",dst);
滚动条操作演示
.h
#pragma once
#include<opencv2/opencv.hpp>
using namespace cv;
class QuickDemo
{
public:
void tracking_bar_demo(Mat& image);
};
.cpp
#include "QuickDemo.h"
Mat src,dst, m;
int lightness = 50;
static void on_track(int,void*)
{
m = Scalar(lightness, lightness, lightness);
add(src,m,dst);
imshow("亮度调整",dst);
}
void QuickDemo::tracking_bar_demo(Mat& image)
{
//在窗口上创建拖动条
namedWindow("亮度调整",WINDOW_AUTOSIZE);
dst = Mat::zeros(image.size(),image.type());
m = Mat::zeros(image.size(), image.type());
src = image;
int max_value = 100;
createTrackbar("Value Bar","亮度调整",&lightness,max_value,on_track);
//第一个参数为拖动条名称 第二个参数为操作窗口名称 第三个参数为实时操作进度条值 第四个参数为最大值 第五个参数为事件函数
on_track(50,0);
}
main.cpp
#include <iostream>
#include "QuickDemo.h"
using namespace cv;
using namespace std;
int main()
{
Mat src = imread("D:/img/girlExample.jpg");
if (src.empty())
{
cout << "load image failed...\n";
return -1;
}
imshow("初始图像", src);
QuickDemo qd;
qd.tracking_bar_demo(src);
waitKey(0);
destroyAllWindows();
return 0;
}
滚动条调整-参数传递(调整亮度与对比度)
.h
#pragma once
#include<opencv2/opencv.hpp>
using namespace cv;
class QuickDemo
{
public:
void tracking_bar_demo(Mat& image);
};
.cpp
#include "QuickDemo.h"
static void on_lightness(int b, void* userdata)
{
Mat image = *((Mat*)userdata);
Mat dst = Mat::zeros(image.size(), image.type());
Mat m = Mat::zeros(image.size(), image.type());
m = Scalar(b, b, b);
addWeighted(image,1.0,m,0,b,dst);
//subtract(image, m, dst);
imshow("亮度与对比度调整", dst);
}
static void on_contrast(int b, void* userdata)
{
Mat image = *((Mat*)userdata);
Mat dst = Mat::zeros(image.size(),image.type());
Mat m = Mat::zeros(image.size(), image.type());
double contrast = b / 100.0;
addWeighted(image,contrast,m,0.0,0,dst);
imshow("亮度与对比度调整",dst);
}
void QuickDemo::tracking_bar_demo(Mat& image)
{
//在窗口上创建拖动条
namedWindow("亮度与对比度调整", WINDOW_AUTOSIZE);
int max_value = 100;
int lightness = 50;
int contrast_value = 100;
createTrackbar("Value Bar", "亮度与对比度调整", &lightness, max_value, on_lightness,(void*)(&image));
createTrackbar("Contrast Bar", "亮度与对比度调整", &contrast_value, 200, on_contrast, (void*)(&image));
on_lightness(50, &image);
//on_contrast(100,&image);
}
main.cpp
#include <iostream>
#include "QuickDemo.h"
using namespace cv;
using namespace std;
int main()
{
Mat src = imread("D:/img/girlExample.jpg");
if (src.empty())
{
cout << "load image failed...\n";
return -1;
}
imshow("初始图像", src);
QuickDemo qd;
qd.tracking_bar_demo(src);
waitKey(0);
destroyAllWindows();
return 0;
}
键盘响应操作
通过waitKey()来实现等待键盘按下
void QuickDemo::key_demos(Mat& image)
{
Mat dst;
while (true)
{
int c = waitKey(100);//停留100ms,等待按键按下
if (c == 27)//ESC
{
break;
}
if (c == 49)//Key #1 数字键1
{
std::cout << "you enter key #1" << std::endl;
}
if (c == 50)//Key #2 数字键2
{
std::cout << "you enter key #2" << std::endl;
}
if (c == 51)//Key #3 数字键3
{
std::cout << "you enter key #3" << std::endl;
}
}
}
实现过程
.h
#pragma once
#include <opencv2/opencv.hpp>
using namespace cv;
class QuickDemo
{
public:
void key_demos(Mat &image);
};
.cpp
#include "QuickDemo.h"
void QuickDemo::key_demos(Mat& image)
{
Mat dst=Mat::zeros(image.size(),image.type());//初始化dst图像
while (true)
{
int c = waitKey(100);//停留100ms,等待按键按下//注意对视频操作要特殊处理
if (c == 27)//ESC
{
break;
}
if (c == 49)//Key #1 数字键1
{
std::cout << "you enter key #1" << std::endl;
cvtColor(image,dst,COLOR_BGR2GRAY);//将图像变为灰度图像
//imshow("键盘响应",dst);
}
if (c == 50)//Key #2 数字键2
{
std::cout << "you enter key #2" << std::endl;
cvtColor(image, dst, COLOR_BGR2HSV);//将图像变为HSV
//imshow("键盘响应", dst);
}
if (c == 51)//Key #3 数字键3
{
std::cout << "you enter key #3" << std::endl;
dst = Scalar(50, 50, 50);
add(image, dst, dst);
//imshow("键盘响应", dst);
}
imshow("键盘响应", dst);
}
}
main.cpp
#include <iostream>
#include "QuickDemo.h"
using namespace std;
using namespace cv;
int main()
{
Mat src = imread("D:/img/girlExample.jpg");
if (src.empty())
{
cout << "load image failed...\n";
return -1;
}
imshow("初始图像", src);
QuickDemo qd;
qd.key_demos(src);
waitKey(0);
destroyAllWindows();
return 0;
}
OpenCV自带颜色表操作
src输入图像可以是彩色图像也可以是灰度图像,dst输出图像由COLORMAP决定,现今OpenCV已支持20多种颜色风格
循环转换颜色风格 我们首先把颜色表做成一个枚举数组
int colormap[]= {
COLORMAP_AUTUMN,
COLORMAP_BONE,
COLORMAP_JET,
COLORMAP_WINTER,
COLORMAP_RAINBOW,
COLORMAP_OCEAN,
COLORMAP_SUMMER,
COLORMAP_SPRING,
COLORMAP_COOL,
COLORMAP_HSV,//10
COLORMAP_PINK,
COLORMAP_HOT,
COLORMAP_PARULA,
COLORMAP_MAGMA,
COLORMAP_INFERNO,
COLORMAP_PLASMA,
COLORMAP_VIRIDIS,
COLORMAP_CIVIDIS,
COLORMAP_TWILIGHT,
COLORMAP_TWILIGHT_SHIFTED,//20
COLORMAP_TURBO,
COLORMAP_DEEPGREEN
};
实现过程。思考:如何用按键实现选择显示(本节末尾给出一个思路示意)
.h
#pragma once
#include <opencv2/opencv.hpp>
using namespace cv;
extern int colormap[];
class QuickDemo
{
public:
void color_style_demo(Mat& image);
};
.cpp
#include "QuickDemo.h"
int colormap[] = {
COLORMAP_AUTUMN,
COLORMAP_BONE,
COLORMAP_JET,
COLORMAP_WINTER,
COLORMAP_RAINBOW,
COLORMAP_OCEAN,
COLORMAP_SUMMER,
COLORMAP_SPRING,
COLORMAP_COOL,
COLORMAP_HSV,//10
COLORMAP_PINK,
COLORMAP_HOT,
COLORMAP_PARULA,
COLORMAP_MAGMA,
COLORMAP_INFERNO,
COLORMAP_PLASMA,
COLORMAP_VIRIDIS,
COLORMAP_CIVIDIS,
COLORMAP_TWILIGHT,
COLORMAP_TWILIGHT_SHIFTED,//20
COLORMAP_TURBO,
COLORMAP_DEEPGREEN
};
void QuickDemo::color_style_demo(Mat& image)
{
Mat dst;
int index=0;
while (true)
{
int c = waitKey(2000);
if (c == 27)//退出
{
break;
}
applyColorMap(image,dst,colormap[index%22]);
index++;
imshow("22种颜色风格",dst);
}
}
main.cpp
#include <iostream>
#include <opencv2/opencv.hpp>
#include "QuickDemo.h"
using namespace std;
using namespace cv;
int main()
{
Mat src = imread("D:/img/girlExample.jpg");
if (src.empty())
{
cout << "load image failed...\n";
return -1;
}
imshow("初始图像", src);
QuickDemo qd;
qd.color_style_demo(src);
waitKey(0);
destroyAllWindows();
return 0;
}
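回到前面的思考题(如何用按键实现选择显示),下面给出一个可能的思路示意,并非标准答案:沿用上面定义的colormap数组,把waitKey的等待时间设为0,用数字键1/2切换风格;函数名color_style_by_key为假设的名字,可放在定义colormap数组的同一个.cpp中。
//思路示意(假设):按数字键1/2在22种颜色风格之间切换,ESC退出
static void color_style_by_key(Mat& image)
{
	Mat dst;
	int index = 0;
	while (true)
	{
		applyColorMap(image, dst, colormap[index % 22]);//应用当前颜色风格
		imshow("按键选择颜色风格", dst);
		int c = waitKey(0);//一直等待按键
		if (c == 27)//ESC退出
		{
			break;
		}
		if (c == 49)//数字键1:上一种风格
		{
			index = (index + 21) % 22;
		}
		if (c == 50)//数字键2:下一种风格
		{
			index = (index + 1) % 22;
		}
	}
}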
图像像素的逻辑操作
.h
#pragma once
#include<opencv2/opencv.hpp>
using namespace cv;
class QuickDemo
{
public:
void bitwise_demo(Mat &image);
};
.cpp
#include "QuickDemo.h"
void QuickDemo::bitwise_demo(Mat& image)
{
Mat m1 = Mat::zeros(Size(256,256),CV_8UC3);
Mat m2 = Mat::zeros(Size(256,256),CV_8UC3);
rectangle(m1,Rect(100,100,100,80),Scalar(255,255,0),-1,LINE_8,0);
rectangle(m2,Rect(150,150,80,80),Scalar(0,255,255),2,LINE_8,0);
//Rect的前两个参数为左上角坐标x,y 后两个参数为宽w和高h rectangle第四个参数为线宽,小于0表示填充
//线宽大于0表示按该宽度绘制边框(描边) 第五个参数为线型:LINE_4、LINE_8和LINE_AA(LINE_AA为抗锯齿)
imshow("m1",m1);
imshow("m2",m2);
Mat dst;
//bitwise_and(m1,m2,dst);//与操作
//bitwise_or(m1, m2, dst);//或操作
//bitwise_not(image,dst);//取反操作
//Mat dst = ~image;//取反操作
bitwise_xor(m1,m2,dst);//异或
imshow("像素位操作",dst);
}
main.cpp
#include <iostream>
#include <opencv2/opencv.hpp>
#include "QuickDemo.h"
using namespace std;
using namespace cv;
int main()
{
Mat src = imread("D:/img/girlExample.jpg");
if (src.empty())
{
cout << "load image failed...\n";
return -1;
}
imshow("初始图像", src);
QuickDemo qd;
qd.bitwise_demo(src);
waitKey(0);
destroyAllWindows();
return 0;
}
通道分离与合并
- 通道分离与合并函数
- split(para1,para2);//通道分离函数
- 第一个参数为源图像
- 第二个参数为目标图像,通常用vector<Mat>存储
- merge(para1, para2);
- 第一个参数为源图像,通常用vector<Mat>存储
- 第二个参数为目标图像
- split(para1,para2);//通道分离函数
void QuickDemo::channels_demo(Mat& image)
{
std::vector<Mat>mv;
split(image,mv);//通道分离函数
imshow("蓝色", mv[0]);
imshow("绿色", mv[1]);
imshow("红色", mv[2]);
Mat dst;
mv[1] = 0;
mv[2] = 0;
merge(mv, dst);
imshow("三通道蓝色",dst);
}
单通道显示为灰度图像,合并后才能呈现彩色图像
split(image,mv);//通道分离函数
imshow("蓝色", mv[0]);
imshow("绿色", mv[1]);
imshow("红色", mv[2]);
Mat dst;
mv[1] = 0;
//mv[2] = 0;
merge(mv, dst);
imshow("双通道蓝+绿",dst);
int from_to[] = {0,2,1,1,2,0};
//from_to按{源通道,目标通道}成对给出:第0个通道换到第2个通道,第1个通道不变,第2个通道换到第0个通道
mixChannels(&image,1,&dst,1,from_to,3);//可以有多个源图像,多个源图像用数组传入
//1张源图像,1张输出图像,3对通道映射
imshow("通道混合",dst);
.h
#pragma once
#include "opencv2/opencv.hpp"
using namespace cv;
class QuickDemo
{
public:
void channels_demo(Mat &image);
};
.cpp
#include "QuickDemo.h"
void QuickDemo::channels_demo(Mat& image)
{
std::vector<Mat>mv;
split(image,mv);//通道分离函数
//imshow("蓝色", mv[0]);
//imshow("绿色", mv[1]);
//imshow("红色", mv[2]);
Mat dst;
mv[1] = 0;
//mv[2] = 0;
merge(mv, dst);
imshow("双通道蓝+绿",dst);
int from_to[] = {0,2,1,1,2,0};
//from_to按{源通道,目标通道}成对给出:第0个通道换到第2个通道,第1个通道不变,第2个通道换到第0个通道
mixChannels(&image,1,&dst,1,from_to,3);//可以有多个源图像,多个源图像用数组传入
//1张源图像,1张输出图像,3对通道映射
imshow("通道混合",dst);
}
main.cpp
#include <opencv2/opencv.hpp>
#include "QuickDemo.h"
#include <iostream>
using namespace std;
int main()
{
Mat src = imread("D:\\千峰嵌入式\\opencv学习\\images\\flower.png");
if (src.empty())
{
cout << "load image failed.../n";
return 0;
}
imshow("初始图像",src);
QuickDemo qd;
qd.channels_demo(src);
waitKey(0);
destroyAllWindows();
return 0;
}
图像色彩空间转换
由于HSV色彩空间对不同颜色的像素区分比RGB色彩空间更明显,所以转换成HSV色彩空间再操作
HSV色彩空间像素值对应颜色
Mat hsv;
cvtColor(image,hsv,COLOR_BGR2HSV);
Mat mask;
inRange(hsv,Scalar(35,43,46),Scalar(77,255,255),mask);
//inRange hsv源图 +色彩空间范围 最小值-最大值 +目标生成图 //以绿色为例
imshow("mask",mask);
所得mask图像中,白色部分为可操作部分(对应绿色区域,值为255),黑色为不可操作部分(值为0)
bitwise_not(mask,mask);
imshow("mask", mask);
对mask取反后,人物部分变为白色(非0),成为可操作区域
image.copyTo(redback,mask)将image中mask非0位置的像素拷贝到redback上,mask为0的位置保留redback原有内容
.cpp
void QuickDemo::inrange_demo(Mat& image)
{
Mat hsv;
cvtColor(image,hsv,COLOR_BGR2HSV);
Mat mask;
inRange(hsv,Scalar(35,43,46),Scalar(77,255,255),mask);
//inRange hsv源图 +色彩空间范围 最小值-最大值 +目标生成图 //以绿色为例
imshow("mask",mask);//白色为1,黑色为0
Mat redback = Mat::zeros(image.size(),image.type());//创建空白图像
redback = Scalar(40,40,200);//创建红色背景布
bitwise_not(mask,mask);
imshow("mask", mask);
image.copyTo(redback,mask);
imshow("roi区域提取",redback);
}
图像像素值统计
方差和均值多用于图像分析
double minv, maxv;
Point minLoc, maxLoc;
std::vector<Mat>mv;
split(image, mv);
for (int i = 0; i < mv.size(); i++)
{
minMaxLoc(mv[i], &minv, &maxv, &minLoc, &maxLoc, Mat());
std::cout <<"No.channels:"<< i << "min value:" << minv << "max value:" << maxv << std::endl;
}
Mat mean, stddev;
meanStdDev(image,mean,stddev);//mean为均值 stddev为标准差
//单独访问各通道的统计值,索引0、1、2分别对应B、G、R通道
std::cout << "B通道 均值:" << mean.at<double>(0, 0) << " 标准差:" << stddev.at<double>(0, 0) << std::endl;
std::cout << "G通道 均值:" << mean.at<double>(1, 0) << " 标准差:" << stddev.at<double>(1, 0) << std::endl;
std::cout << "R通道 均值:" << mean.at<double>(2, 0) << " 标准差:" << stddev.at<double>(2, 0) << std::endl;
std::cout << "means:" << mean << std::endl;
std::cout << "stddev:" << stddev << std::endl;
纯色图像有均值,但标准差恒为0
double minv, maxv;
Point minLoc, maxLoc;
Mat redback = Mat::zeros(image.size(),image.type());
redback = Scalar(40,40,200);
meanStdDev(redback,mean,stddev);
imshow("redback",redback);
split(redback, mv);
for (int i = 0; i < mv.size(); i++)
{
minMaxLoc(mv[i], &minv, &maxv, &minLoc, &maxLoc, Mat());
std::cout << "No.channels:" << i << "min value:" << minv << "max value:" << maxv << std::endl;
}
std::cout << "means:" << mean << std::endl;
std::cout << "stddev:" << stddev << std::endl;
.h
#pragma once
#include <opencv2/opencv.hpp>
using namespace cv;
class QuickDemo
{
public:
void pixel_static_demo(Mat &image);
};
.cpp
#include"QuickDemo.h"
void QuickDemo::pixel_static_demo(Mat& image)
{
double minv, maxv;
Point minLoc, maxLoc;
std::vector<Mat>mv;
split(image, mv);
for (int i = 0; i < mv.size(); i++)
{
minMaxLoc(mv[i], &minv, &maxv, &minLoc, &maxLoc, Mat());
std::cout <<"No.channels:"<< i << "min value:" << minv << "max value:" << maxv << std::endl;
}
Mat mean, stddev;
meanStdDev(image,mean,stddev);//mean为均值 stddev为标准差
//单独访问各通道的统计值,索引0、1、2分别对应B、G、R通道
std::cout << "B通道 均值:" << mean.at<double>(0, 0) << " 标准差:" << stddev.at<double>(0, 0) << std::endl;
std::cout << "G通道 均值:" << mean.at<double>(1, 0) << " 标准差:" << stddev.at<double>(1, 0) << std::endl;
std::cout << "R通道 均值:" << mean.at<double>(2, 0) << " 标准差:" << stddev.at<double>(2, 0) << std::endl;
std::cout << "means:" << mean << std::endl;
std::cout << "stddev:" << stddev << std::endl;
Mat redback = Mat::zeros(image.size(),image.type());
redback = Scalar(40,40,200);
meanStdDev(redback,mean,stddev);
imshow("redback",redback);
split(redback, mv);
for (int i = 0; i < mv.size(); i++)
{
minMaxLoc(mv[i], &minv, &maxv, &minLoc, &maxLoc, Mat());
std::cout << "No.channels:" << i << "min value:" << minv << "max value:" << maxv << std::endl;
}
std::cout << "means:" << mean << std::endl;
std::cout << "stddev:" << stddev << std::endl;
}
main.cpp
#include <opencv2/opencv.hpp>
#include "QuickDemo.h"
#include <iostream>
using namespace std;
int main()
{
Mat src = imread("D:\\千峰嵌入式\\opencv学习\\images\\flower.png");
if (src.empty())
{
cout << "load image failed.../n";
return 0;
}
imshow("初始图像", src);
QuickDemo qd;
qd.pixel_static_demo(src);
waitKey(0);
destroyAllWindows();
return 0;
}
图像几何形状绘制
OpenCV线型lineType
有三个值可选:LINE_8(默认)、LINE_4、LINE_AA
速度:LINE_8>LINE_AA
美观:LINE_AA>LINE_8
绘制矩形
Rect rect;//定义一个矩形
rect.x = 100;
rect.y = 100;
rect.width = 200;
rect.height = 200;
rectangle(image,rect,Scalar(0,0,255),2,8,0);
//image为输入图片 rect为矩形定义信息 线宽为2 linetype为8 最后一个参数固定为0
rect.x = 100;
rect.y = 400;
rectangle(image, rect, Scalar(0, 0, 255), -1, 8, 0);
//线宽为负数表示填充操作
imshow("绘制演示", image);
绘制圆
图片叠加
Rect rect;//定义一个矩形
rect.x = 100;
rect.y = 100;
rect.width = 200;
rect.height = 200;
Mat bg = Mat::zeros(image.size(),image.type());
//rectangle(bg,rect,Scalar(0,0,255),2,8,0);
//image为输入图片 rect为矩形定义信息 线宽为2 linetype为8 最后一个参数固定为0
//rect.x = 100;
//rect.y = 400;
rectangle(image, rect, Scalar(0, 0, 255), -1, 8, 0);
//线宽为负数表示填充操作
circle(bg,Point(350,400),50,Scalar(255,0,0),-1,8,0);
//Point为圆心坐标 50为半径
Mat dst;
addWeighted(image,0.7,bg,0.3,0,dst);//0.7和0.3调整占比
imshow("绘制演示", dst);
绘制直线
Mat dst;
addWeighted(image,0.7,bg,0.3,0,dst);
line(dst, Point(100, 100), Point(300, 400), Scalar(0,255,0),2,LINE_AA,0);
imshow("绘制演示", dst);
绘制椭圆
RotatedRect rrt;
rrt.center = Point(200, 200);
rrt.size = Size(100,200);//设置椭圆的尺寸(宽和高)
rrt.angle = 0.0;//设置角度
ellipse(bg,rrt,Scalar(0,255,255),2,8);
imshow("绘制演示", bg);
角度为0
角度为90
代码小结
void QuickDemo::drawing_demo(Mat& image)
{
Rect rect;//定义一个矩形
rect.x = 100;
rect.y = 100;
rect.width = 200;
rect.height = 200;
Mat bg = Mat::zeros(image.size(),image.type());
//rectangle(bg,rect,Scalar(0,0,255),2,8,0);
//image为输入图片 rect为矩形定义信息 线宽为2 linetype为8 最后一个参数固定为0
//rect.x = 100;
//rect.y = 400;
rectangle(image, rect, Scalar(0, 0, 255), -1, 8, 0);
//线宽为负数表示填充操作
circle(bg,Point(350,400),50,Scalar(255,0,0),-1,8,0);
//Point为圆心坐标 50为半径
Mat dst;
addWeighted(image,0.7,bg,0.3,0,dst);
line(dst, Point(100, 100), Point(300, 400), Scalar(0,255,0),2,LINE_AA,0);
RotatedRect rrt;
rrt.center = Point(200, 200);
rrt.size = Size(100,200);//设置椭圆的尺寸(宽和高)
rrt.angle = 90.0;//设置角度
ellipse(bg,rrt,Scalar(0,255,255),2,8);
imshow("绘制演示", bg);
}
随机数与随机颜色
RNG rng(12345);//openCV产生随机数固定写法
rng.uniform(1, 3); 在[1,3)区间,随机生成一个整数
void QuickDemo::random_drawing(Mat& image)
{
Mat canvas = Mat::zeros(Size(512,512),CV_8UC3);//canvas画布
int w = canvas.cols;
int h = canvas.rows;
RNG rng(12345);//设置随机数种子
while (true)
{
int c = waitKey(10);
if (c == 27) {
break;
}
else
{
int x1 = rng.uniform(0,w);
int y1 = rng.uniform(0, h);
int x2 = rng.uniform(0, w);
int y2 = rng.uniform(0, h);
int b = rng.uniform(0, 255);
int g = rng.uniform(0, 255);
int r = rng.uniform(0, 255);
//canvas = Scalar(0,0,0);//加上每画线条之前清空画布,使每次只画一条线
line(canvas,Point(x1,y1), Point(x2, y2),Scalar(b,g,r),1,LINE_AA,0);
imshow("随机绘制演示",canvas);
}
}
}
多边形填充与绘制
polylines()函数绘制|fillPoly()函数填充
多边形绘制
Mat canvas = Mat::zeros(Size(512,512),CV_8UC3);//创建一个512*512的画布
Point p1(100,100);
Point p2(350, 100);
Point p3(450, 280);
Point p4(320, 450);
Point p5(80, 400);
std::vector<Point>pts(5);//存放多边形点集
pts.at(0)=p1;
pts.at(1) = p2;
pts.at(2) = p3;
pts.at(3) = p4;
pts.at(4) = p5;
polylines(canvas, pts, true, Scalar(0, 0, 255), 2, 8, 0);//多边形绘制函数
//参数说明:para1:在哪个图上绘制 2:点集 3:是否闭合(true为闭合) 4:画笔颜色 5:线宽 6:线型(LINE_4/LINE_8/LINE_AA) 7:坐标的小数位数shift
//polylines对线宽有断言要求(必须大于0),所以第5个参数不能填-1来实现填充
fillPoly(canvas, pts, Scalar(255, 255, 0), 8, 0);//填充函数
drawContours()函数用来绘制轮廓,也可以绘制填充多边形
//drawContours()用来绘制轮廓,也可以绘制填充多边形
std::vector<std::vector<Point>>contours;
contours.push_back(pts);
drawContours(canvas,contours,-1,Scalar(255,0,0),2);
//第三个参数为n(0,1,2,3...)表示绘制contours中的第几个点集,-1表示绘制全部点集
//第五个参数大于0表示绘制,-1表示填充
imshow("多边形绘制",canvas);
//drawContours()用来绘制轮廓,也可以绘制填充多边形
std::vector<std::vector<Point>>contours;
contours.push_back(pts);
drawContours(canvas,contours,-1,Scalar(255,0,0),-1);
//第三个参数为n(0,1,2,3...)表示绘制contours中的第几个点集,-1表示绘制全部点集
//第五个参数大于0表示绘制,-1表示填充
imshow("多边形填充",canvas);
.h
#pragma once
#include <opencv2/opencv.hpp>
using namespace cv;
class QuickDemo
{
public:
void polyline_drawing_demo();
};
.cpp
#include "QuickDemo.h"
void QuickDemo::polyline_drawing_demo()
{
Mat canvas = Mat::zeros(Size(512,512),CV_8UC3);//创建一个512*512的画布
Point p1(100,100);
Point p2(350, 100);
Point p3(450, 280);
Point p4(320, 450);
Point p5(80, 400);
std::vector<Point>pts(5);//存放多边形点集
pts.at(0)=p1;
pts.at(1) = p2;
pts.at(2) = p3;
pts.at(3) = p4;
pts.at(4) = p5;
//polylines(canvas, pts, true, Scalar(0, 0, 255), 2, 8, 0);//多边形绘制函数
//参数说明:para1:在哪个图上绘制 2:点集 3:是否闭合(true为闭合) 4:画笔颜色 5:线宽 6:线型(LINE_4/LINE_8/LINE_AA) 7:坐标的小数位数shift
//polylines对线宽有断言要求(必须大于0),所以第5个参数不能填-1来实现填充
//fillPoly(canvas, pts, Scalar(255, 255, 0), 8, 0);//填充函数
//drawContours()用来绘制轮廓,也可以绘制填充多边形
std::vector<std::vector<Point>>contours;
contours.push_back(pts);
drawContours(canvas,contours,-1,Scalar(255,0,0),-1);
//第三个参数为n(0,1,2,3...)表示绘制contours中的第几个点集,-1表示绘制全部点集
//第五个参数大于0表示绘制,-1表示填充
imshow("多边形填充",canvas);
}
main.cpp
#include <iostream>
#include "QuickDemo.h"
using namespace std;
int main()
{
QuickDemo qd;
qd.polyline_drawing_demo();
waitKey(0);
destroyAllWindows();
return 0;
}
鼠标操作与响应
鼠标回调:鼠标回调设置函数+用户自定义函数
初步实现,鼠标抬起才能显示
#include "QuickDemo.h"
//左键右键滚轮
//以左键为例:有三个动作事件 左键按下 鼠标滑动 左键抬起
//创建全局变量来定义初始状态
Point sp(-1, -1);//起始位置
Point ep(-1, -1);//结束位置
static void on_draw(int event,int x,int y,int flags,void *userdata) {
Mat image = *((Mat*)userdata);
if (event == EVENT_LBUTTONDOWN)//鼠标按下
{
sp.x = x;
sp.y = y;
std::cout << sp << std::endl;
}
else if (event == EVENT_LBUTTONUP)//鼠标抬起
{
ep.x = x;
ep.y = y;
int dx = ep.x - sp.x;
int dy = ep.y - sp.y;
if (dx > 0 && dy > 0) {
Rect box(sp.x, sp.y, dx, dy);
rectangle(image, box, Scalar(0, 0, 255), 2, 8, 0);//绘制一个矩形(线宽为2)
imshow("鼠标绘制", image);//不能忽略,更新图片显示,否则画的图形无法显示出来
//为下一次绘制做好准备
sp.x = -1;
sp.y = -1;
}
}
}
void QuickDemo::mouse_drawing_demo(Mat& image)
{
namedWindow("鼠标绘制",WINDOW_AUTOSIZE);
setMouseCallback("鼠标绘制",on_draw,(void*)(&image));//第一个参数鼠标事件所在窗口,第二个为回调函数
imshow("鼠标绘制",image);
}
动态绘制初步实现
#include "QuickDemo.h"
//左键右键滚轮
//以左键为例:有三个动作事件 左键按下 鼠标滑动 左键抬起
//创建全局变量来定义初始状态
Point sp(-1, -1);//起始位置
Point ep(-1, -1);//结束位置
static void on_draw(int event,int x,int y,int flags,void *userdata) {
Mat image = *((Mat*)userdata);
if (event == EVENT_LBUTTONDOWN)//鼠标按下
{
sp.x = x;
sp.y = y;
std::cout << sp << std::endl;
}
else if (event == EVENT_LBUTTONUP)//鼠标抬起
{
ep.x = x;
ep.y = y;
int dx = ep.x - sp.x;
int dy = ep.y - sp.y;
if (dx > 0 && dy > 0) {
Rect box(sp.x, sp.y, dx, dy);
rectangle(image, box, Scalar(0, 0, 255), 2, 8, 0);//绘制一个矩形(线宽为2)
imshow("鼠标绘制", image);//不能忽略,更新图片显示,否则画的图形无法显示出来
//为下一次绘制做好准备
sp.x = -1;
sp.y = -1;
}
}
else if (event == EVENT_MOUSEMOVE)//鼠标移动事件
{
if (sp.x > 0 && sp.y > 0) {
ep.x = x;
ep.y = y;
int dx = ep.x - sp.x;
int dy = ep.y - sp.y;
if (dx > 0 && dy > 0) {
Rect box(sp.x, sp.y, dx, dy);
rectangle(image, box, Scalar(0, 0, 255), 2, 8, 0);//绘制一个矩形(线宽为2)
imshow("鼠标绘制", image);//不能忽略,更新图片显示,否则画的图形无法显示出来
}
}
}
}
void QuickDemo::mouse_drawing_demo(Mat& image)
{
namedWindow("鼠标绘制",WINDOW_AUTOSIZE);
setMouseCallback("鼠标绘制",on_draw,(void*)(&image));//第一个参数鼠标事件所在窗口,第二个为回调函数
imshow("鼠标绘制",image);
}
动态显示完成:通过备份原图来消除上一次轨迹
#include "QuickDemo.h"
//左键右键滚轮
//以左键为例:有三个动作事件 左键按下 鼠标滑动 左键抬起
//创建全局变量来定义初始状态
Point sp(-1, -1);//起始位置
Point ep(-1, -1);//结束位置
Mat temp;//用来保存原图
static void on_draw(int event,int x,int y,int flags,void *userdata) {
Mat image = *((Mat*)userdata);
if (event == EVENT_LBUTTONDOWN)//鼠标按下
{
sp.x = x;
sp.y = y;
std::cout << sp << std::endl;
}
else if (event == EVENT_LBUTTONUP)//鼠标抬起
{
ep.x = x;
ep.y = y;
int dx = ep.x - sp.x;
int dy = ep.y - sp.y;
if (dx > 0 && dy > 0) {
Rect box(sp.x, sp.y, dx, dy);
rectangle(image, box, Scalar(0, 0, 255), 2, 8, 0);//绘制一个矩形(线宽为2)
imshow("鼠标绘制", image);//不能忽略,更新图片显示,否则画的图形无法显示出来
//为下一次绘制做好准备
sp.x = -1;
sp.y = -1;
}
}
else if (event == EVENT_MOUSEMOVE)//鼠标移动事件
{
if (sp.x > 0 && sp.y > 0) {
ep.x = x;
ep.y = y;
int dx = ep.x - sp.x;
int dy = ep.y - sp.y;
if (dx > 0 && dy > 0) {
Rect box(sp.x, sp.y, dx, dy);
temp.copyTo(image);//将鼠标拖动过程中最新的绘制结果更新给image
rectangle(image, box, Scalar(0, 0, 255), 2, 8, 0);//绘制一个矩形(线宽为2)
imshow("鼠标绘制", image);//不能忽略,更新图片显示,否则画的图形无法显示出来
}
}
}
}
void QuickDemo::mouse_drawing_demo(Mat& image)
{
namedWindow("鼠标绘制",WINDOW_AUTOSIZE);
setMouseCallback("鼠标绘制",on_draw,(void*)(&image));//第一个参数鼠标事件所在窗口,第二个为回调函数
imshow("鼠标绘制",image);
temp = image.clone();
}
提取ROI区域
else if (event == EVENT_LBUTTONUP)//鼠标抬起
{
ep.x = x;
ep.y = y;
int dx = ep.x - sp.x;
int dy = ep.y - sp.y;
if (dx > 0 && dy > 0) {
Rect box(sp.x, sp.y, dx, dy);
imshow("ROI区域",image(box));
rectangle(image, box, Scalar(0, 0, 255), 2, 8, 0);//绘制一个矩形(线宽为2)
imshow("鼠标绘制", image);//不能忽略,更新图片显示,否则画的图形无法显示出来
//imshow("ROI区域",image(box));//改变顺序,达到不同显示效果
//为下一次绘制做好准备
sp.x = -1;
sp.y = -1;
}
}
再次调用temp.copyTo(image),取消ROI的红色矩形外框
else if (event == EVENT_LBUTTONUP)//鼠标抬起
{
ep.x = x;
ep.y = y;
int dx = ep.x - sp.x;
int dy = ep.y - sp.y;
if (dx > 0 && dy > 0) {
Rect box(sp.x, sp.y, dx, dy);
temp.copyTo(image);//将鼠标拖动过程中最新的绘制结果更新给image,从而取消ROI的红色矩形外框
imshow("ROI区域", image(box));
rectangle(image, box, Scalar(0, 0, 255), 2, 8, 0);//绘制一个矩形(线宽为2)
imshow("鼠标绘制", image);//不能忽略,更新图片显示,否则画的图形无法显示出来
//为下一次绘制做好准备
sp.x = -1;
sp.y = -1;
}
}
进一步优化:当鼠标拖出图片范围时,截取ROI会越界报错,因此将拖动和抬起的判断条件加上范围限制:ep.x < image.cols && ep.y < image.rows
//解决
else if (event == EVENT_LBUTTONUP)//鼠标抬起
{
ep.x = x;
ep.y = y;
int dx = ep.x - sp.x;
int dy = ep.y - sp.y;
if ((dx > 0 && dy > 0) && (ep.x < image.cols && ep.y < image.rows)) {
Rect box(sp.x, sp.y, dx, dy);
temp.copyTo(image);//将鼠标拖动过程中最新的绘制结果更新给image,从而取消ROI的红色矩形外框
imshow("ROI区域", image(box));
rectangle(image, box, Scalar(0, 0, 255), 2, 8, 0);//绘制一个矩形(线宽为2)
imshow("鼠标绘制", image);//不能忽略,更新图片显示,否则画的图形无法显示出来
//为下一次绘制做好准备
sp.x = -1;
sp.y = -1;
}
}
else if (event == EVENT_MOUSEMOVE)//鼠标移动事件
{
if (sp.x > 0 && sp.y > 0) {
ep.x = x;
ep.y = y;
int dx = ep.x - sp.x;
int dy = ep.y - sp.y;
if ((dx > 0 && dy > 0) && (ep.x < image.cols && ep.y < image.rows)) {
Rect box(sp.x, sp.y, dx, dy);
temp.copyTo(image);//将鼠标拖动过程中最新的绘制结果更新给image
rectangle(image, box, Scalar(0, 0, 255), 2, 8, 0);//绘制一个矩形(线宽为2)
imshow("鼠标绘制", image);//不能忽略,更新图片显示,否则画的图形无法显示出来
}
}
}
本节代码
.cpp
#include "QuickDemo.h"
//左键右键滚轮
//以左键为例:有三个动作事件 左键按下 鼠标滑动 左键抬起
//创建全局变量来定义初始状态
Point sp(-1, -1);//起始位置
Point ep(-1, -1);//结束位置
Mat temp;//用来保存原图
static void on_draw(int event,int x,int y,int flags,void *userdata) {
Mat image = *((Mat*)userdata);
if (event == EVENT_LBUTTONDOWN)//鼠标按下
{
sp.x = x;
sp.y = y;
std::cout << sp << std::endl;
}
else if (event == EVENT_LBUTTONUP)//鼠标抬起
{
ep.x = x;
ep.y = y;
int dx = ep.x - sp.x;
int dy = ep.y - sp.y;
if ((dx > 0 && dy > 0) && (ep.x < image.cols && ep.y < image.rows)) {
Rect box(sp.x, sp.y, dx, dy);
temp.copyTo(image);//将鼠标拖动过程中最新的绘制结果更新给image,从而取消ROI的红色矩形外框
imshow("ROI区域", image(box));
rectangle(image, box, Scalar(0, 0, 255), 2, 8, 0);//绘制一个矩形(线宽为2)
imshow("鼠标绘制", image);//不能忽略,更新图片显示,否则画的图形无法显示出来
//为下一次绘制做好准备
sp.x = -1;
sp.y = -1;
}
}
else if (event == EVENT_MOUSEMOVE)//鼠标移动事件
{
if (sp.x > 0 && sp.y > 0) {
ep.x = x;
ep.y = y;
int dx = ep.x - sp.x;
int dy = ep.y - sp.y;
if ((dx > 0 && dy > 0) && (ep.x < image.cols && ep.y < image.rows)) {
Rect box(sp.x, sp.y, dx, dy);
temp.copyTo(image);//将鼠标拖动过程中最新的绘制结果更新给image
rectangle(image, box, Scalar(0, 0, 255), 2, 8, 0);//绘制一个矩形(线宽为2)
imshow("鼠标绘制", image);//不能忽略,更新图片显示,否则画的图形无法显示出来
}
}
}
}
void QuickDemo::mouse_drawing_demo(Mat& image)
{
namedWindow("鼠标绘制",WINDOW_AUTOSIZE);
setMouseCallback("鼠标绘制",on_draw,(void*)(&image));//第一个参数鼠标事件所在窗口,第二个为回调函数
imshow("鼠标绘制",image);
temp = image.clone();
}
拓展作业:1、用同样方法绘制一个圆 2、目标检测标注工具
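针对拓展作业1,下面给出一个可能的思路示意(并非标准答案):沿用本节的sp、temp全局变量,把左键按下点作为圆心,抬起点到圆心的距离作为半径;函数名on_draw_circle为假设的名字,使用时在setMouseCallback中注册它即可。
#include <cmath>//std::sqrt
//思路示意(假设):左键按下记录圆心,左键抬起时以两点距离为半径画圆
static void on_draw_circle(int event, int x, int y, int flags, void* userdata)
{
	Mat image = *((Mat*)userdata);
	if (event == EVENT_LBUTTONDOWN)//按下:记录圆心
	{
		sp.x = x;
		sp.y = y;
	}
	else if (event == EVENT_LBUTTONUP)//抬起:计算半径并绘制
	{
		int dx = x - sp.x;
		int dy = y - sp.y;
		int radius = cvRound(std::sqrt((double)(dx * dx + dy * dy)));
		if (radius > 0 && sp.x >= 0 && sp.y >= 0)
		{
			temp.copyTo(image);//恢复原图,清除上一次的轨迹
			circle(image, sp, radius, Scalar(0, 0, 255), 2, LINE_AA, 0);
			imshow("鼠标绘制", image);
			sp.x = -1;
			sp.y = -1;
		}
	}
}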
图像像素类型转换与归一化_显示浮点型图像
像素类型转换(常见三种)
像素类型转换
void QuickDemo::norm_demo(Mat& image)
{
Mat dst;
std::cout << image.type() << std::endl;//打印原来的数据类型
image.convertTo(dst,CV_32F);//转换为float类型数据
std::cout << dst.type() << std::endl;//打印转换后的数据类型
image.convertTo(dst,CV_32S);//转换为int类型数据
std::cout << dst.type() << std::endl;//打印转换后的数据类型
}
- 16代表CV_8UC3的类型数据,表示每个通道八位的无符号字节类型的RGB三通道彩色图像
- 21代表CV_32FC3的类型数据,表示RGB三通道的每个通道是32位的浮点数类型图像
- 20代表CV_32SC3的类型数据,表示RGB三通道的每个通道是32位有符号整型类型图像
像素归一化
- mask是一个二值图像,不为0的像素参与计算,为0的像素直接忽略,表示只对mask区域归一化
normalize支持NORM_MINMAX、NORM_INF、NORM_L1、NORM_L2四种常用归一化方式,可将数据缩放到0~1之间
对像素进行归一化处理前先转换成浮点数,不能直接对字节型数据进行归一化处理
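下面用一个独立的小例子示意四种常用归一化方式的区别(示意程序,数值为假设):
#include <opencv2/opencv.hpp>
#include <iostream>
using namespace cv;
int main()
{
	Mat v = (Mat_<float>(1, 4) << 1, 2, 4, 8);
	Mat minmax, inf, l1, l2;
	normalize(v, minmax, 1.0, 0, NORM_MINMAX);//线性拉伸到[0,1]:(x-min)/(max-min)
	normalize(v, inf, 1.0, 0, NORM_INF);//除以最大绝对值
	normalize(v, l1, 1.0, 0, NORM_L1);//除以各元素绝对值之和
	normalize(v, l2, 1.0, 0, NORM_L2);//除以向量的L2范数
	std::cout << "MINMAX:" << minmax << std::endl;
	std::cout << "INF:" << inf << std::endl;
	std::cout << "L1:" << l1 << std::endl;
	std::cout << "L2:" << l2 << std::endl;
	return 0;
}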
void QuickDemo::norm_demo(Mat& image)
{
Mat dst;
std::cout << image.type() << std::endl;//打印原来的数据类型
image.convertTo(image,CV_32F);//将image转换为float类型数据
std::cout << image.type() << std::endl;//打印转换后的数据类型
//CV_8UC3->CV_32FC3
//image.convertTo(dst, CV_32S);//将dst变为int类型数据
//std::cout << dst.type() << std::endl;//打印转换后的数据类型
normalize(image, dst, 1.0, 0, NORM_MINMAX);//将图像像素归一化在0-1之间
std::cout << dst.type() << std::endl;
imshow("图像数据归一化", dst);
}
如果想要imshow对浮点数图像正确显示,浮点数的取值范围必须归一化在0-1之间,即不归一化处理则会出现如下情况(将归一化函数注释掉)
Mat dst;
std::cout << image.type() << std::endl;//打印原来的数据类型
image.convertTo(image,CV_32F);//将image转换为float类型数据
std::cout << image.type() << std::endl;//打印转换后的数据类型
//normalize(image, dst, 1.0, 0, NORM_MINMAX);//将图像像素归一化在0-1之间
std::cout << dst.type() << std::endl;
imshow("图像数据归一化", image);
思考:如何将归一化后的浮点数转回字节型,只需要将归一化后的浮点型*255,然后再通过convertTo函数转成CV_8U或者CV_32S
Mat dst;
std::cout << image.type() << std::endl;//打印原来的数据类型
image.convertTo(image,CV_32F);//将image转换为float类型数据
std::cout << image.type() << std::endl;//打印转换后的数据类型
//CV_8UC3->CV_32FC3
//image.convertTo(dst, CV_32S);//将dst变为int类型数据
//std::cout << dst.type() << std::endl;//打印转换后的数据类型
normalize(image, dst, 1.0, 0, NORM_MINMAX);//将图像像素归一化在0-1之间
std::cout << dst.type() << std::endl;
//imshow("图像数据归一化", dst);
//转回字节型
dst = dst * 255;
dst.convertTo(dst, CV_8U);
std::cout << dst.type() << std::endl;
imshow("图像数据归一化再转回字节型", dst);
转回成功,且最后一次打印为CV_8U类型:16
本节代码:
.h
#pragma once
#include<opencv2/opencv.hpp>
using namespace cv;
class QuickDemo
{
public:
void norm_demo(Mat& image);
};
.cpp
#include "QuickDemo.h"
void QuickDemo::norm_demo(Mat& image)
{
Mat dst;
std::cout << image.type() << std::endl;//打印原来的数据类型
image.convertTo(image,CV_32F);//将image转换为float类型数据
std::cout << image.type() << std::endl;//打印转换后的数据类型
//CV_8UC3->CV_32FC3
//image.convertTo(dst, CV_32S);//将dst变为int类型数据
//std::cout << dst.type() << std::endl;//打印转换后的数据类型
normalize(image, dst, 1.0, 0, NORM_MINMAX);//将图像像素归一化在0-1之间
std::cout << dst.type() << std::endl;
//imshow("图像数据归一化", dst);
//转回字节型
dst = dst * 255;
dst.convertTo(dst, CV_8U);
std::cout << dst.type() << std::endl;
imshow("图像数据归一化再转回字节型", dst);
}
main.cpp
#include <iostream>
#include "QuickDemo.h"
using namespace std;
int main()
{
Mat src = imread("D:/img/girlExample.jpg");
QuickDemo qd;
qd.norm_demo(src);
waitKey(0);
destroyAllWindows();
return 0;
}
图像放缩与插值
图像放缩
Mat zoomin, zoomout;//放大(景物拉近)、缩小(景物拉远)
int h = image.rows;
int w = image.cols;
resize(image,zoomout,Size(w/2,h/2),0,0,INTER_LINEAR);
//fx和fy默认写0,INTER_LINEAR为线性插值(常用)
imshow("zoomout",zoomout);
resize(image, zoomin, Size(w * 1.5, h * 1.5), 0, 0, INTER_LINEAR);
imshow("zoomin", zoomin);
图像插值
四种插值方法见CSDN
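这里补充一个小示意(假设image为已读入的三通道图像,可放进前文的resize_demo中试验),比较四种常见的插值标志:
//示意:同一张图放大2倍,比较四种插值方式
Mat nearest, linear, cubic, lanczos;
Size big(image.cols * 2, image.rows * 2);
resize(image, nearest, big, 0, 0, INTER_NEAREST);//最近邻插值:最快,容易出锯齿
resize(image, linear, big, 0, 0, INTER_LINEAR);//双线性插值:速度与效果折中(默认)
resize(image, cubic, big, 0, 0, INTER_CUBIC);//双三次插值:更平滑,稍慢
resize(image, lanczos, big, 0, 0, INTER_LANCZOS4);//Lanczos插值:细节保留较好,最慢
imshow("INTER_NEAREST", nearest);
imshow("INTER_LINEAR", linear);
imshow("INTER_CUBIC", cubic);
imshow("INTER_LANCZOS4", lanczos);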
思考:通过鼠标事件缩放图片
本节代码
.h
#pragma once
#include <opencv2/opencv.hpp>
using namespace cv;
class QuickDemo
{
public:
void resize_demo(Mat &image);
};
.cpp
#include "QuickDemo.h"
void QuickDemo::resize_demo(Mat& image)
{
Mat zoomin, zoomout;//放大(景物拉近)、缩小(景物拉远)
int h = image.rows;
int w = image.cols;
resize(image,zoomout,Size(w/2,h/2),0,0,INTER_LINEAR);
//fx和fy默认写0,INTER_LINEAR为线性插值(常用)
imshow("zoomout",zoomout);
resize(image, zoomin, Size(w * 1.5, h * 1.5), 0, 0, INTER_LINEAR);
imshow("zoomin", zoomin);
}
main.cpp
#include <iostream>
#include "QuickDemo.h"
using namespace std;
int main()
{
Mat src = imread("D:/img/girlExample.jpg");
QuickDemo qd;
qd.resize_demo(src);
waitKey(0);
destroyAllWindows();
return 0;
}
图像翻转
flip函数:第一个参数为输入图像,第二个参数为输出图像,第三个参数有三种取值:-1、0、1
Mat dst;
flip(image,dst,0);
//上下翻转
imshow("图像翻转", dst);
Mat dst;
flip(image, dst, 1);
//镜像左右翻转
imshow("图像翻转", dst);
Mat dst;
flip(image, dst, -1);
//对角线180°翻转
imshow("图像翻转", dst);
本节代码
.h
#pragma once
#include <opencv2/opencv.hpp>
using namespace cv;
class QuickDemo
{
public:
void flip_demo(Mat &image);
};
.cpp
#include "QuickDemo.h"
void QuickDemo::flip_demo(Mat& image)
{
Mat dst;
//flip函数第三个参数有三种取值-1、0、1
//flip(image,dst,0);
//上下翻转
//flip(image, dst, 1);
//镜像左右翻转
flip(image, dst, -1);
//对角线180°翻转
imshow("图像翻转", dst);
}
main.cpp
#include <iostream>
#include "QuickDemo.h"
using namespace std;
int main()
{
Mat src = imread("D:/img/girlExample.jpg");
if (src.empty())
{
cout << "load image failed...\n";
return -1;
}
imshow("源图", src);
QuickDemo qd;
qd.flip_demo(src);
waitKey(0);
destroyAllWindows();
return 0;
}
图像旋转
仿射变换函数
- 第三个参数M为2 * 3的矩阵
- 第四个参数为生成的目标图像大小
- 第五个参数为插值的方式,默认为线性插值
- 第六个参数为图像边缘形式
- 第七个参数为图像边缘的颜色,默认为黑色Scalar()
初步旋转实现
Mat dst, M;
int w = image.cols;
int h = image.rows;
M = getRotationMatrix2D(Point2f(w/2,h/2),45,1.0);
//生成2*3矩阵
//第一个参数为原来图像的中心位置Point2f(w/2,h/2)
//第二个参数为旋转角度
//第三个参数为设置放缩,由于warpAffine函数也可以设置缩放,所以这里不需要缩放,设置为1.0
warpAffine(image, dst, M, image.size());//使用默认方法
//warpAffine(image, dst, M, image.size(),INTER_LINEAR,0,Scalar(255,0,0));
imshow("旋转演示", dst);
warpAffine(image, dst, M, image.size(),INTER_LINEAR,0,Scalar(255,0,0));//使用边缘处理
发现旋转后的图像并不能完全显现出来
优化实现
求旋转得到的新图宽高
进行矩阵和宽高更新
cos θ和sin θ计算
//由于旋转的度数是任意的,所以一定要保证cos和sin是正数
//abs()得到绝对值
double cos = abs(M.at<double>(0,0));
double sin = abs(M.at<double>(0,1));
新的宽和高为
int nw = cos * w + sin * h;
int nh = sin * w + cos * h;
新的图像中心(原中心+相对偏移量)
M.at<double>(0, 2) = M.at<double>(0, 2) + (nw / 2 - w / 2);
M.at<double>(1, 2) = M.at<double>(1, 2) + (nh / 2 - h / 2);
将新的参数带入
warpAffine(image, dst, M, Size(nw,nh),INTER_LINEAR,0,Scalar(255,0,0));
imshow("旋转演示", dst);
本节代码
.h
#pragma once
#include "opencv2/opencv.hpp"
using namespace cv;
class QuickDemo
{
public:
void rotate_demo(Mat& image);
};
.cpp
#include "QuickDemo.h"
void QuickDemo::rotate_demo(Mat& image)
{
Mat dst, M;
int w = image.cols;
int h = image.rows;
M = getRotationMatrix2D(Point2f(w/2,h/2),45,1.0);
//生成2*3矩阵
//第一个参数为原来图像的中心位置Point2f(w/2,h/2)
//第二个参数为旋转角度
//第三个参数为设置放缩,由于warpAffine函数也可以设置缩放,所以这里不需要缩放,设置为1.0
//warpAffine(image, dst, M, image.size());
//由于旋转的度数是任意的,所以一定要保证cos和sin是正数
//abs()得到绝对值
double cos = abs(M.at<double>(0,0));
double sin = abs(M.at<double>(0,1));
int nw = cos * w + sin * h;
int nh = sin * w + cos * h;
M.at<double>(0, 2) = M.at<double>(0, 2) + (nw / 2 - w / 2);
M.at<double>(1, 2) = M.at<double>(1, 2) + (nh / 2 - h / 2);
warpAffine(image, dst, M, Size(nw,nh),INTER_LINEAR,0,Scalar(255,0,0));
imshow("旋转演示", dst);
}
main.cpp
#include <iostream>
#include "QuickDemo.h"
using namespace std;
int main()
{
Mat src = imread("D:/img/girlExample.jpg");
if (src.empty())
{
cout << "load image failed...\n";
return -1;
}
imshow("源图", src);
QuickDemo qd;
qd.rotate_demo(src);
waitKey(0);
destroyAllWindows();
return 0;
}
视频文件摄像头使用
摄像头使用
void QuickDemo::video_demo(Mat& image)
{
VideoCapture capture(0);//参数为0:捕获摄像头
Mat frame;
while (true)
{
capture.read(frame);
flip(frame,frame,1);//镜像
if (frame.empty())//用于读取视频文件读到末尾
{
break;
}
imshow("frame", frame);
//TODO:do something...(各种图像处理)
int c = waitKey(10);
if (c == 27)
{
break;
}
}
//release
capture.release();
}
捕获视频
VideoCapture capture("D:/千峰嵌入式/opencv学习/images/01.mp4");
Mat frame;
while (true)
{
capture.read(frame);
flip(frame,frame,1);//镜像
if (frame.empty())//用于读取视频文件读到末尾
{
break;
}
imshow("frame", frame);
//TODO:do something...(各种图像处理)
int c = waitKey(50);//waitKey中的值决定了视频播放速度,1s多少帧
if (c == 27)
{
break;
}
//release
capture.release();
}
特别注意,如果没有特殊要求,waitKey()参数为1,保证程序实时性
添加事件处理
#include "QuickDemo.h"
void QuickDemo::colorSpace_Demo(Mat& image)
{
Mat gray, hsv;
cvtColor(image, hsv, COLOR_BGR2HSV);
//H 0~180,S,V
cvtColor(image, gray, COLOR_BGR2GRAY);
imshow("HSV", hsv);
imshow("灰度",gray);
}
void QuickDemo::video_demo(Mat& image)
{
//VideoCapture capture(0);//捕获摄像头
VideoCapture capture("D:/千峰嵌入式/opencv学习/images/01.mp4");
Mat frame;
while (true)
{
capture.read(frame);
flip(frame,frame,1);//镜像
if (frame.empty())//用于读取视频文件读到末尾
{
break;
}
imshow("frame", frame);
//TODO:do something...(各种图像处理)
colorSpace_Demo(frame);//调用色彩空间转换
int c = waitKey(50);
if (c == 27)
{
break;
}
}
//release
capture.release();
}
本节代码:
.h
#pragma once
#include "opencv2/opencv.hpp"
using namespace cv;
class QuickDemo
{
public:
void colorSpace_Demo(Mat& image);
void video_demo(Mat& image);
};
.cpp
#include "QuickDemo.h"
void QuickDemo::colorSpace_Demo(Mat& image)
{
Mat gray, hsv;
cvtColor(image, hsv, COLOR_BGR2HSV);
//H 0~180,S,V
cvtColor(image, gray, COLOR_BGR2GRAY);
imshow("HSV", hsv);
imshow("灰度",gray);
}
void QuickDemo::video_demo(Mat& image)
{
//VideoCapture capture(0);//捕获摄像头
VideoCapture capture("D:/千峰嵌入式/opencv学习/images/01.mp4");
Mat frame;
while (true)
{
capture.read(frame);
flip(frame,frame,1);//镜像
if (frame.empty())//用于读取视频文件读到末尾
{
break;
}
imshow("frame", frame);
//TODO:do something...(各种图像处理)
colorSpace_Demo(frame);//调用色彩空间转换
int c = waitKey(50);
if (c == 27)
{
break;
}
}
//release
capture.release();
}
main.cpp
#include <iostream>
#include "QuickDemo.h"
using namespace std;
int main()
{
Mat src = imread("D:/img/girlExample.jpg");
if (src.empty())
{
cout << "load image failed...\n";
return -1;
}
//imshow("输入图像", src);
QuickDemo qd;
qd.video_demo(src);
waitKey(0);
destroyAllWindows();
return 0;
}
视频处理与保存
视频有分辨率(清晰度,如SD、HD、UHD)、帧数、帧率和编码等属性
void QuickDemo::video_demo(Mat& image)
{
//VideoCapture capture(0);//捕获摄像头
VideoCapture capture("D:/千峰嵌入式/opencv学习/images/01.mp4");
int frame_width = capture.get(CAP_PROP_FRAME_WIDTH);
int frame_height = capture.get(CAP_PROP_FRAME_HEIGHT);
int count = capture.get(CAP_PROP_FRAME_COUNT);//帧数
double fps = capture.get(CAP_PROP_FPS);//帧率
std::cout << "frame width:" << frame_width << std::endl;
std::cout << "frame height:" << frame_height << std::endl;
std::cout << "FPS:" << fps << std::endl;
std::cout << "Number of Frames:" << count << std::endl;
Mat frame;
while (true)
{
capture.read(frame);
flip(frame, frame, 1);//镜像
if (frame.empty())//用于读取视频文件读到末尾
{
break;
}
imshow("frame", frame);
//TODO:do something...(各种图像处理)
colorSpace_Demo(frame);//调用色彩空间转换
int c = waitKey(50);
if (c == 27)
{
break;
}
}
//release
capture.release();
}
视频保存
void QuickDemo::video_demo(Mat& image)
{
//VideoCapture capture(0);//捕获摄像头
VideoCapture capture("D:/千峰嵌入式/opencv学习/images/01.mp4");
int frame_width = capture.get(CAP_PROP_FRAME_WIDTH);
int frame_height = capture.get(CAP_PROP_FRAME_HEIGHT);
int count = capture.get(CAP_PROP_FRAME_COUNT);//帧数
double fps = capture.get(CAP_PROP_FPS);//帧率
std::cout << "frame width:" << frame_width << std::endl;
std::cout << "frame height:" << frame_height << std::endl;
std::cout << "FPS:" << fps << std::endl;
std::cout << "Number of Frames:" << count << std::endl;
VideoWriter writer("D:/test.mp4",capture.get(CAP_PROP_FOURCC),fps,Size(frame_width,frame_height),true);
//capture.get(CAP_PROP_FOURCC)得到视频编码
Mat frame;
while (true)
{
capture.read(frame);
flip(frame, frame, 1);//镜像
if (frame.empty())//用于读取视频文件读到末尾
{
break;
}
imshow("frame", frame);
//TODO:do something...(各种图像处理)
colorSpace_Demo(frame);//调用色彩空间转换
writer.write(frame);
int c = waitKey(50);
if (c == 27)
{
break;
}
}
//release
capture.release();
writer.release();
}
如果想自定义视频格式,可以使用set方法
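一个可能的用法示意(参数均为假设值,属性是否生效取决于摄像头与后端支持;编码这里假设用MJPG保存为avi):
//示意:用set方法自定义采集分辨率,并用自定义编码保存
VideoCapture capture(0);//打开摄像头
capture.set(CAP_PROP_FRAME_WIDTH, 640);//期望宽度(是否生效取决于设备)
capture.set(CAP_PROP_FRAME_HEIGHT, 480);//期望高度
int w = (int)capture.get(CAP_PROP_FRAME_WIDTH);
int h = (int)capture.get(CAP_PROP_FRAME_HEIGHT);
VideoWriter writer("D:/test_custom.avi", VideoWriter::fourcc('M', 'J', 'P', 'G'), 25.0, Size(w, h), true);
Mat frame;
while (capture.read(frame))
{
	imshow("frame", frame);
	writer.write(frame);
	if (waitKey(1) == 27)//ESC退出
	{
		break;
	}
}
capture.release();
writer.release();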
本节代码
.h
#pragma once
#include "opencv2/opencv.hpp"
using namespace cv;
class QuickDemo
{
public:
void colorSpace_Demo(Mat& image);
void video_demo(Mat& image);
};
.cpp
#include "QuickDemo.h"
void QuickDemo::colorSpace_Demo(Mat& image)
{
Mat gray, hsv;
cvtColor(image, hsv, COLOR_BGR2HSV);
//H 0~180,S,V
cvtColor(image, gray, COLOR_BGR2GRAY);
imshow("HSV", hsv);
imshow("灰度", gray);
}
void QuickDemo::video_demo(Mat& image)
{
//VideoCapture capture(0);//捕获摄像头
VideoCapture capture("D:/千峰嵌入式/opencv学习/images/01.mp4");
int frame_width = capture.get(CAP_PROP_FRAME_WIDTH);
int frame_height = capture.get(CAP_PROP_FRAME_HEIGHT);
int count = capture.get(CAP_PROP_FRAME_COUNT);//帧数
double fps = capture.get(CAP_PROP_FPS);//帧率
std::cout << "frame width:" << frame_width << std::endl;
std::cout << "frame height:" << frame_height << std::endl;
std::cout << "FPS:" << fps << std::endl;
std::cout << "Number of Frames:" << count << std::endl;
VideoWriter writer("D:/test.mp4",capture.get(CAP_PROP_FOURCC),fps,Size(frame_width,frame_height),true);
//capture.get(CAP_PROP_FOURCC)得到视频编码
Mat frame;
while (true)
{
capture.read(frame);
flip(frame, frame, 1);//镜像
if (frame.empty())//用于读取视频文件读到末尾
{
break;
}
imshow("frame", frame);
//TODO:do something...(各种图像处理)
colorSpace_Demo(frame);//调用色彩空间转换
writer.write(frame);
int c = waitKey(50);
if (c == 27)
{
break;
}
}
//release
capture.release();
writer.release();
}
main.cpp
#include <iostream>
#include "QuickDemo.h"
using namespace std;
int main()
{
Mat src = imread("D:/img/girlExample.jpg");
if (src.empty())
{
cout << "load image failed...\n";
return -1;
}
//imshow("输入图像", src);
QuickDemo qd;
qd.video_demo(src);
waitKey(0);
destroyAllWindows();
return 0;
}
图像直方图
如下图所示,右边为左边的图像直方图,统计了像素点(0-255范围内)在图像中出现的次数
图像直方图只反映了图像的像素值信息,不能反映图像的空间信息
实现过程
#include "QuickDemo.h"
//一维直方图
void QuickDemo::histogram_demo(Mat& image)
{
//三通道分离
std::vector<Mat>bgr_plane;
split(image,bgr_plane);
//定义参数变量
const int channels[1] = { 0 };
const int bins[1] = { 256 };//256个灰度级别
float hranges[2] = { 0,255 };//通道像素取值范围0-255
const float* ranges[1] = { hranges };//直方图像素取值范围
Mat b_hist;
Mat g_hist;
Mat r_hist;
//计算Blue,Green,Red通道的直方图
calcHist(&bgr_plane[0], 1, 0, Mat(), b_hist, 1, bins, ranges);
calcHist(&bgr_plane[1], 1, 0, Mat(), g_hist, 1, bins, ranges);
calcHist(&bgr_plane[2], 1, 0, Mat(), r_hist, 1, bins, ranges);
//1表示一张源图,b_hist表示blue通道的直方图输出,1表示一维
//显示直方图
int hist_w = 512;
int hist_h = 400;
int bin_w = cvRound((double)hist_w/bins[0]);
Mat histImage = Mat::zeros(hist_h,hist_w,CV_8UC3);
//归一化直方图数据
normalize(b_hist, b_hist, 0, histImage.rows, NORM_MINMAX, -1, Mat());
normalize(g_hist, g_hist, 0, histImage.rows, NORM_MINMAX, -1, Mat());
normalize(r_hist, r_hist, 0, histImage.rows, NORM_MINMAX, -1, Mat());
//绘制直方图曲线
for (int i = 1; i < bins[0]; i++)
{
line(histImage,Point(bin_w*(i-1),hist_h-cvRound(b_hist.at<float>(i-1))),
Point(bin_w*(i),hist_h-cvRound(b_hist.at<float>(i))),Scalar(255,0,0),2,8,0);
line(histImage, Point(bin_w * (i - 1), hist_h - cvRound(g_hist.at<float>(i - 1))),
Point(bin_w * (i), hist_h - cvRound(g_hist.at<float>(i))), Scalar(0, 255, 0), 2, 8, 0);
line(histImage, Point(bin_w * (i - 1), hist_h - cvRound(r_hist.at<float>(i - 1))),
Point(bin_w * (i), hist_h - cvRound(r_hist.at<float>(i))), Scalar(0, 0, 255), 2, 8, 0);
}
//显示直方图
namedWindow("Histogram Demo",WINDOW_AUTOSIZE);
imshow("Histogram Demo",histImage);
}
二维直方图
BIN为步长
#include "QuickDemo.h"
void QuickDemo::histogram_2d_demo(Mat& image)
{
//2D直方图
Mat hsv, hs_hist;
cvtColor(image,hsv,COLOR_BGR2HSV);
int hbins = 30, sbins = 32;
//h通道步长:180/30=6 s通道步长:256/32=8
int hist_bins[] = {hbins,sbins};
float h_range[] = { 0,180 };
float s_range[] = { 0,256 };
const float* hs_ranges[] = {h_range,s_range};
int hs_channels[] = { 0,1 };
calcHist(&hsv,1,hs_channels,Mat(),hs_hist,2,hist_bins,hs_ranges,true,false);
//mask默认为Mat()
double maxVal = 0;
minMaxLoc(hs_hist,0,&maxVal,0,0);
int scale = 10;
Mat hist2d_image = Mat::zeros(sbins*scale,hbins*scale,CV_8UC3);
for (int h = 0; h < hbins; h++)
{
for (int s = 0; s < sbins; s++)
{
float binVal = hs_hist.at<float>(h, s);
int intensity = cvRound(binVal * 255 / maxVal);
rectangle(hist2d_image, Point(h * scale, s * scale),
Point((h + 1) * scale - 1, (s + 1) * scale - 1),
Scalar::all(intensity),
-1);
}
}
applyColorMap(hist2d_image,hist2d_image,COLORMAP_JET);
imshow("H-S Histogram",hist2d_image);
imwrite("D:/hist_2d.png", hist2d_image);
}
直方图均衡化 PS:看不懂先去看计算机图形学
分布函数
均衡化前
均衡化后
直方图均衡化演示(针对灰度图像)
void QuickDemo::histogram_eq_demo(Mat& image)
{
Mat gray;
cvtColor(image,gray,COLOR_BGR2GRAY);
imshow("灰度图像", gray);
Mat dst;
equalizeHist(gray, dst);
imshow("直方图均衡化演示", dst);
}
彩色图像自行补充
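关于彩色图像的直方图均衡化,一个常见思路(示意,并非唯一做法)是转换到YCrCb色彩空间,只对亮度通道Y做均衡化,再转回BGR;函数名equalize_color_demo为假设的名字:
//示意:彩色图像直方图均衡化,只均衡亮度通道,避免色彩失真
void equalize_color_demo(Mat& image)
{
	Mat ycrcb;
	cvtColor(image, ycrcb, COLOR_BGR2YCrCb);//BGR转YCrCb
	std::vector<Mat> channels;
	split(ycrcb, channels);//分离出Y、Cr、Cb三个通道
	equalizeHist(channels[0], channels[0]);//只对亮度通道Y做均衡化
	merge(channels, ycrcb);
	Mat dst;
	cvtColor(ycrcb, dst, COLOR_YCrCb2BGR);//转回BGR显示
	imshow("彩色直方图均衡化", dst);
}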
本节代码
.h
#pragma once
#include "opencv2/opencv.hpp"
using namespace cv;
class QuickDemo
{
public:
void histogram_eq_demo(Mat &image);
};
.cpp
#include "QuickDemo.h"
void QuickDemo::histogram_eq_demo(Mat& image)
{
Mat gray;
cvtColor(image,gray,COLOR_BGR2GRAY);
imshow("灰度图像", gray);
Mat dst;
equalizeHist(gray, dst);
imshow("直方图均衡化演示", dst);
}
main.cpp
#include <iostream>
#include "QuickDemo.h"
using namespace std;
int main()
{
Mat src = imread("D:/img/girlExample.jpg");
if (src.empty())
{
cout << "load image failed...\n";
return -1;
}
//imshow("输入图像", src);
QuickDemo qd;
qd.histogram_eq_demo(src);
waitKey(0);
destroyAllWindows();
return 0;
}
图像卷积操作
卷积介绍
通过均值卷积操作,高的像素值被拉低,低的被抬高,从而降低图像的对比度,使图像产生模糊效果,所以这类卷积操作又叫做图像模糊
以3*3卷积核为例
从左到右,从上到下,对九宫格中心做卷积操作,完成均值的输出
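为了更直观地理解上面的均值卷积过程,这里给一个小示意(假设image为已读入的图像):用filter2D手动指定一个3*3的均值卷积核,效果与blur(image, dst, Size(3,3))等价。
//示意:手动构造3x3均值卷积核,每个系数为1/9
Mat kernel = Mat::ones(3, 3, CV_32F) / 9.0f;
Mat dst;
filter2D(image, dst, -1, kernel, Point(-1, -1), 0, BORDER_DEFAULT);
//第三个参数-1表示输出与输入深度相同,Point(-1,-1)表示锚点取核中心
imshow("3x3均值卷积", dst);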
API函数
[外链图片转存失败,源站可能有防盗链机制,建议将图片保存下来直接上传(img-gWD9x3A7-1687457045373)(…/…/typora-user-images/image-20230414111113541.png)]
实现过程
blur执行的是均值卷积,ksize为卷积核大小
Size(15,15)表示15*15的卷积核,数值越大,图像越模糊
Point(-1,-1)表示默认以卷积核的中心作为锚点(输出位置)
borderType为边缘处理方式
#include "QuickDemo.h"
void QuickDemo::blur_demo(Mat& image)
{
Mat dst;
blur(image, dst, Size(15, 15),Point(-1,-1));//二维卷积//边缘处理默认用default
//blur(image, dst, Size(15, 1), Point(-1, -1));//水平方向一维卷积
//blur(image, dst, Size(1, 15), Point(-1, -1));//垂直方向一维卷积
imshow("图像模糊",dst);
}
本节代码
.h
#pragma once
#include "opencv2/opencv.hpp"
using namespace cv;
class QuickDemo
{
public:
void blur_demo(Mat &image);
};
.cpp
#include "QuickDemo.h"
void QuickDemo::blur_demo(Mat& image)
{
Mat dst;
blur(image, dst, Size(15, 15),Point(-1,-1));//二维卷积
//blur(image, dst, Size(15, 1), Point(-1, -1));//水平方向一维卷积
//blur(image, dst, Size(1, 15), Point(-1, -1));//垂直方向一维卷积
imshow("图像模糊",dst);
//边缘处理默认用default
}
main.cpp
#include <iostream>
#include "QuickDemo.h"
using namespace std;
int main()
{
Mat src = imread("D:/img/girlExample.jpg");
if (src.empty())
{
cout << "load image failed...\n";
return -1;
}
imshow("输入图像", src);
QuickDemo qd;
qd.blur_demo(src);
waitKey(0);
destroyAllWindows();
return 0;
}
高斯模糊
高斯矩阵中中心点系数最大,越远离中心点,系数越小
高斯模糊产生的效果有中心化效应,就是中心像素所占的比重最大,从空间位置上来说,它考虑了中心位置对整个卷积输出的贡献,而均值卷积没考虑到这一点,只是取了平均
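高斯模糊对应的API是GaussianBlur,下面是一个最小示意(核大小与sigma为假设值,假设image为已读入的图像):
//示意:高斯模糊,核大小必须为正奇数;sigma为0时会根据核大小自动推算
Mat dst;
GaussianBlur(image, dst, Size(15, 15), 5, 5);
//第三个参数为卷积核大小,第四、五个参数分别为X、Y方向的标准差sigma
imshow("高斯模糊", dst);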
高斯双边模糊
介绍
- 图中左上角阶梯状图像,高阶代表灰度颜色(高像素值),低阶代表黑色部分(低像素值)
- 白色点代表当前要进行高斯模糊的中心点
- 用普通高斯模糊会导致图像边缘丢失,导致边缘变模糊
- 因为彩色图像一共有五个维度:x、y、R、G、B
- 而普通空间卷积核产生的窗口只会对坐标空间x和y维度进行处理,忽略了RGB三个维度
- 考虑了色彩空间颜色值的差异,要保留边缘差异,所以还需要另一个高斯卷积核来处理色彩
- 双边卷积核=坐标空间卷积核 * 色彩空间卷积核
- 从而通过双边滤波得到右上角的图像
API解释
- d表示窗口,可以通过sigmaColor和sigmaSpace来计算
- sigmaSpace为空间窗口的sigma,一般取10或15
- sigmaColor为色彩窗口的sigma,值越大,处理效果越好
实现过程
.h
#pragma once
#include "opencv2/opencv.hpp"
using namespace cv;
class QuickDemo
{
public:
void bifilter_demo(Mat &image);
};
.cpp
#include "QuickDemo.h"
void QuickDemo::bifilter_demo(Mat& image)
{
Mat dst;
bilateralFilter(image,dst,0,100,10);
imshow("双边模糊", dst);
}
main.cpp
#include <iostream>
#include "QuickDemo.h"
using namespace std;
int main()
{
Mat src = imread("D:/千峰嵌入式/opencv学习/images/example.png");
if (src.empty())
{
cout << "load image failed...\n";
return -1;
}
imshow("输入图像", src);
QuickDemo qd;
qd.bifilter_demo(src);
waitKey(0);
destroyAllWindows();
return 0;
}
案例:实时人脸识别
人脸识别深度学习模型,OpenCV4及以上版本
.h
#pragma once
#include "opencv2/opencv.hpp"
using namespace cv;
class QuickDemo
{
public:
void face_detection_demo();
};
.cpp
#include "QuickDemo.h"
#include <opencv2/dnn.hpp>
using namespace cv;
using namespace std;
void QuickDemo::face_detection_demo()
{
std::string root_dir = "D:\\software\\opencv\\sources\\samples\\dnn\\face_detector\\";
dnn::Net net=cv::dnn::readNetFromTensorflow(root_dir+"opencv_face_detector_uint8.pb",root_dir+"opencv_face_detector.pbtxt");
//读取深度学习网络(深度学习模型+配置文件)
VideoCapture capture("D:/千峰嵌入式/opencv学习/images/晴天.mp4");
Mat frame;
while (true) {
capture.read(frame);
//flip(frame, frame, 1);//镜像用于摄像头
if (frame.empty())//用于读取视频文件读到末尾
{
break;
}
Mat blob = dnn::blobFromImage(frame, 1.0, Size(300, 300), Scalar(104,177,123),false,false);
//读数据
net.setInput(blob);//NCHW
//准备数据
Mat probs = net.forward();//前向推理,得到检测输出
//完成检测
Mat detectionMat(probs.size[2],probs.size[3],CV_32F,probs.ptr<float>());
//解析结果
for (int i = 0; i < detectionMat.rows; i++)
{
float confidence = detectionMat.at<float>(i, 2);
if (confidence > 0.5) {
int x1 = static_cast<int>(detectionMat.at<float>(i, 3) * frame.cols);
int y1 = static_cast<int>(detectionMat.at<float>(i, 4) * frame.rows);
int x2 = static_cast<int>(detectionMat.at<float>(i, 5) * frame.cols);
int y2 = static_cast<int>(detectionMat.at<float>(i, 6) * frame.rows);
Rect box(x1, y1, x2 - x1, y2 - y1);
rectangle(frame, box, Scalar(0, 0, 255), 2, 8, 0);
}
}
imshow("人脸检测演示", frame);
int c = waitKey(1);
if (c == 27)
{
break;
}
}
}
main.cpp
#include <iostream>
#include "QuickDemo.h"
using namespace std;
int main()
{
QuickDemo qd;
qd.face_detection_demo();
waitKey(0);
destroyAllWindows();
return 0;
}
演示结果