Filters are an important topic in image-processing courses and can be roughly divided into two categories: spatial-domain filters and frequency-domain filters. This article introduces four commonly used filters: the median filter, the mean filter, the Gaussian filter, and the bilateral filter, and implements them with OpenCV. Spatial-domain filters are generally applied by convolving the original image with a template (kernel); readers unfamiliar with convolution should review it on their own.
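Before looking at the individual filters, here is a minimal sketch of template-based spatial filtering using OpenCV's filter2D (the 3×3 box kernel is only an illustration; any template can be substituted):

#include <opencv2/opencv.hpp>

// Minimal sketch: apply a spatial template (kernel) to an image with cv::filter2D.
// The 3x3 box kernel below simply averages each 3x3 neighborhood.
void apply_template(const cv::Mat& src, cv::Mat& dst)
{
	cv::Mat kernel = cv::Mat::ones(3, 3, CV_64FC1) / 9.0;  // 3x3 mean template
	cv::filter2D(src, dst, -1, kernel);                     // -1: keep the source depth
}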

Theory:

Linear filter formula: $g(i,j) = \sum_{k,l} f(i+k, j+l)\, h(k,l)$, where $f$ is the input image, $h$ is the template (kernel), and $g$ is the output; each output pixel is a weighted sum of the input pixels covered by the template. The mean filter and the Gaussian filter are both linear filters, so let us look at these two first.

Mean filter:

Template: for a 3×3 mean filter, the template is a 3×3 matrix whose nine coefficients are all 1/9, so that they sum to 1.

Starting from the first element of the image to be processed, the template is convolved with the original image. Intuitively, mean filtering replaces each pixel's gray value with the average of the gray values of its neighboring pixels.
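As a small worked example (the pixel values are chosen arbitrarily for illustration), a 3×3 mean filter replaces a center value of 90 by the average of the nine values in its neighborhood:

$$\frac{1}{9}\left(10+12+11+13+90+14+12+11+10\right) = \frac{183}{9} \approx 20.3$$

Note how the single bright pixel is spread out over the window rather than removed, which is why mean filtering also blurs edges.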

Gaussian filter:

Template: generated from the Gaussian kernel function.

Gaussian kernel function: $G(x,y) = \frac{1}{2\pi\sigma^2}\exp\left(-\frac{(x-x_0)^2 + (y-y_0)^2}{2\sigma^2}\right)$, where $(x_0, y_0)$ is the center of the template and $\sigma$ controls how quickly the weights fall off with distance; the coefficients are then normalized so that they sum to 1.

For example, with $\sigma = 1$ (the value used by generate_gassian_kernel in the code below), the normalized 3×3 Gaussian template is approximately

0.075  0.124  0.075
0.124  0.204  0.124
0.075  0.124  0.075

Median filter: also a spatial-domain filter. The main idea is to take the pixels in a neighborhood, sort them by gray value, and use the median (middle) value as the gray value of the output pixel. For example, for the neighborhood values {9, 10, 11, 12, 13, 14, 15, 16, 200}, the output is the median 13, so the outlier 200 is discarded rather than averaged in.

Bilateral filter: also a spatial-domain filter, but a non-linear one. Each output pixel is a weighted average of its neighbors, where the weight is the product of a spatial Gaussian (closer pixels count more) and a range Gaussian (pixels with similar gray values count more), so noise is smoothed while edges are preserved.
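Written out, the bilateral weight is the product of the two Gaussians (this is the standard formulation; it corresponds to the d_metrix and r_metrix tables in the code below):

$$w(i,j,k,l) = \exp\left(-\frac{(i-k)^2 + (j-l)^2}{2\sigma_d^2} - \frac{\left|I(i,j) - I(k,l)\right|^2}{2\sigma_r^2}\right), \qquad I_{out}(i,j) = \frac{\sum_{k,l} w(i,j,k,l)\, I(k,l)}{\sum_{k,l} w(i,j,k,l)}$$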

C++ implementation:

static void exchange(int& a, int& b)
{
	int t = a;
	a = b;
	b = t;
}

// Simple exchange sort used by the median filter; adequate for small windows.
static void bubble_sort(int* K, int length)
{
	for (int i = 0; i < length; i++)
		for (int j = i + 1; j < length; j++)
		{
			if (K[i] > K[j])
				exchange(K[i], K[j]);
		}
}
/// Generate a 2-D Gaussian kernel (the mean u is unused: the kernel is always centred on the template)
static cv::Mat generate_gassian_kernel(double u, double sigma, cv::Size size)
{
	int width = size.width;
	int height = size.height;
	cv::Mat gassian_kernel(cv::Size(width, height), CV_64FC1);
	double value = 0;
	double sum = 0;   // running total of all coefficients, used for normalisation
	for (int i = 0; i < height; i++)        // i indexes rows (y)
		for (int j = 0; j < width; j++)     // j indexes columns (x)
		{
			value = 1.0 / 2.0 / CV_PI / sigma / sigma * exp(-1.0 * ((j - width / 2)*(j - width / 2) + (i - height / 2)*(i - height / 2)) / 2.0 / sigma / sigma);
			sum += value;
			gassian_kernel.ptr<double>(i)[j] = value;
		}
	// normalise so that the coefficients sum to 1
	for (int i = 0; i < height; i++)
		for (int j = 0; j < width; j++)
		{
			gassian_kernel.ptr<double>(i)[j] /= sum;
		}
	return gassian_kernel;
}
/// Mean filter
/// Note: kernel_size acts as a half-width; the averaging window is 2*kernel_size x 2*kernel_size,
/// which is why the sums below are divided by kernel_size*kernel_size*4.
void lmt_main_blur(cv::Mat& img_in, cv::Mat& img_out, int kernel_size)
{
	img_out = img_in.clone();
	cv::Mat mat1;
	// pad the source by kernel_size on every side, replicating the border pixels
	cv::copyMakeBorder(img_in, mat1, kernel_size, kernel_size, kernel_size, kernel_size, cv::BORDER_REPLICATE);

	int cols = mat1.cols;
	int rows = mat1.rows;
	int channels = img_out.channels();
	const uchar* const pt = mat1.ptr<uchar>(0);

	for (int i = kernel_size; i < rows - kernel_size; i++)
	{
		for (int j = kernel_size; j < cols - kernel_size; j++)
		{
			if (channels == 1)
			{
				long long int sum_pixel = 0;
				for (int m = -1 * kernel_size; m < kernel_size; m++)
					for (int n = -1 * kernel_size; n < kernel_size; n++)
					{
						sum_pixel += pt[(i + m)*cols + (j + n)];
					}
				img_out.ptr<uchar>(i - kernel_size)[j - kernel_size] = (double)sum_pixel / (kernel_size*kernel_size * 4);
			}
			else if (channels == 3)
			{
				long long int sum_pixel = 0;
				long long int sum_pixel1 = 0;
				long long int sum_pixel2 = 0;
				for (int m = -1 * kernel_size; m < kernel_size; m++)
					for (int n = -1 * kernel_size; n < kernel_size; n++)
					{
						sum_pixel += pt[((i + m)*cols + (j + n))*channels + 0];
						sum_pixel1 += pt[((i + m)*cols + (j + n))*channels + 1];
						sum_pixel2 += pt[((i + m)*cols + (j + n))*channels + 2];
					}
				img_out.ptr<uchar>(i - kernel_size)[(j - kernel_size)*channels + 0] = (double)sum_pixel / (double)(kernel_size*kernel_size * 4);
				img_out.ptr<uchar>(i - kernel_size)[(j - kernel_size)*channels + 1] = (double)sum_pixel1 / (double)(kernel_size*kernel_size * 4);
				img_out.ptr<uchar>(i - kernel_size)[(j - kernel_size)*channels + 2] = (double)sum_pixel2 / (double)(kernel_size*kernel_size * 4);
			}
		}
	}

}
/// Median filter
/// Note: kernel_size again acts as a half-width (window is 2*kernel_size x 2*kernel_size),
/// and the function assumes a 3-channel input because it always splits into 3 planes.
void lmt_median_blur(cv::Mat& img_in, cv::Mat& img_out, int kernel_size)
{
	img_out = img_in.clone();
	cv::Mat mat1;
	cv::copyMakeBorder(img_in, mat1, kernel_size, kernel_size, kernel_size, kernel_size, cv::BORDER_REPLICATE);

	int cols = mat1.cols;
	int rows = mat1.rows;
	int channels = img_out.channels();

	cv::Mat mat[3];
	cv::Mat mat_out[3];
	cv::split(mat1, mat);
	cv::split(img_out, mat_out);
	for (int k = 0; k < 3; k++)
	{
		const uchar* const pt = mat[k].ptr<uchar>(0);
		uchar* pt_out = mat_out[k].ptr<uchar>(0);
		for (int i = kernel_size; i < rows - kernel_size; i++)
		{
			for (int j = kernel_size; j < cols - kernel_size; j++)
			{
				// gather the window values, sort them, and take the middle one
				int* K = new int[kernel_size*kernel_size * 4];
				int ker_num = 0;
				for (int m = -1 * kernel_size; m < kernel_size; m++)
					for (int n = -1 * kernel_size; n < kernel_size; n++)
					{
						K[ker_num] = pt[(i + m)*cols + (j + n)];
						ker_num++;
					}
				bubble_sort(K, ker_num);
				mat_out[k].ptr<uchar>(i - kernel_size)[j - kernel_size] = K[ker_num / 2];
				delete[] K;   // release the window buffer
			}
		}
	}
	cv::merge(mat_out, 3, img_out);
}
/// Gaussian filter
void lmt_gaussian_blur(cv::Mat& img_src, cv::Mat& img_dst, cv::Size kernel_size)
{
	// start from a copy so the border pixels, which the loops below never write, keep their original values
	img_dst = img_src.clone();
	int cols = img_src.cols;
	int rows = img_src.rows;
	int channels = img_src.channels();
	cv::Mat gassian_kernel = generate_gassian_kernel(0, 1, kernel_size);   // sigma fixed to 1
	int width = kernel_size.width / 2;     // kernel half-width
	int height = kernel_size.height / 2;   // kernel half-height
	for (int i = height; i < rows - height; i++)
	{
		for (int j = width; j < cols - width; j++)
		{
			for (int k = 0; k < channels; k++)
			{
				double sum = 0.0;
				for (int m = -height; m <= height; m++)
				{
					for (int n = -width; n <= width; n++)
					{
						sum += (double)(img_src.ptr<uchar>(i + m)[(j + n)*channels + k]) * gassian_kernel.ptr<double>(height + m)[width + n];
					}
				}
				if (sum > 255.0)
					sum = 255;
				if (sum < 0.0)
					sum = 0;
				img_dst.ptr<uchar>(i)[j*channels + k] = (uchar)sum;
			}
		}
	}

	
}
/// Bilateral filter
void lmt_bilateral_filter(cv::Mat& img_in, cv::Mat& img_out, const int r, double sigma_d, double sigma_r)
{
	int i, j, m, n, k;
	int nx = img_in.cols, ny = img_in.rows, m_nChannels = img_in.channels();
	const int w_filter = 2 * r + 1; // side length of the filter window

	double gaussian_d_coeff = -0.5 / (sigma_d * sigma_d);
	double gaussian_r_coeff = -0.5 / (sigma_r * sigma_r);
	double  **d_metrix = new double *[w_filter];
	for (int i = 0; i < w_filter; ++i)
		d_metrix[i] = new double[w_filter];
	
	double r_metrix[256];  // similarity weight  
	img_out = cv::Mat(img_in.size(),img_in.type());
	uchar* m_imgData = img_in.ptr<uchar>(0);
	uchar* m_img_outData = img_out.ptr<uchar>(0);
	// copy the original image  
	double* img_tmp = new double[m_nChannels * nx * ny];
	for (i = 0; i < ny; i++)
		for (j = 0; j < nx; j++)
			for (k = 0; k < m_nChannels; k++)
			{
				img_tmp[i * m_nChannels * nx + m_nChannels * j + k] = m_imgData[i * m_nChannels * nx + m_nChannels * j + k];
			}

	// compute spatial weight  
	for (i = -r; i <= r; i++)
		for (j = -r; j <= r; j++)
		{
			int x = j + r;
			int y = i + r;

			d_metrix[y][x] = exp((i * i + j * j) * gaussian_d_coeff);
		}

	// compute similarity weight  
	for (i = 0; i < 256; i++)
	{
		r_metrix[i] = exp(i * i * gaussian_r_coeff);
	}

	// bilateral filter  
	for (i = 0; i < ny; i++)
		for (j = 0; j < nx; j++)
		{
			for (k = 0; k < m_nChannels; k++)
			{
				double weight_sum, pixcel_sum;
				weight_sum = pixcel_sum = 0.0;

				for (m = -r; m <= r; m++)
					for (n = -r; n <= r; n++)
					{
						if (m*m + n*n > r*r) continue;

						int x_tmp = j + n;
						int y_tmp = i + m;

						x_tmp = x_tmp < 0 ? 0 : x_tmp;
						x_tmp = x_tmp > nx - 1 ? nx - 1 : x_tmp;   // border handling: replicate
						y_tmp = y_tmp < 0 ? 0 : y_tmp;
						y_tmp = y_tmp > ny - 1 ? ny - 1 : y_tmp;

						int pixcel_dif = (int)abs(img_tmp[y_tmp * m_nChannels * nx + m_nChannels * x_tmp + k] - img_tmp[i * m_nChannels * nx + m_nChannels * j + k]);
						double weight_tmp = d_metrix[m + r][n + r] * r_metrix[pixcel_dif];  // combined spatial x range weight

						pixcel_sum += img_tmp[y_tmp * m_nChannels * nx + m_nChannels * x_tmp + k] * weight_tmp;
						weight_sum += weight_tmp;
					}

				pixcel_sum = pixcel_sum / weight_sum;
				m_img_outData[i * m_nChannels * nx + m_nChannels * j + k] = (uchar)pixcel_sum;

			} // one channel

		} // END ALL LOOP  
	delete[] img_tmp;   // release the temporary copy of the image
	for (i = 0; i < w_filter; i++)
		delete[] d_metrix[i];
	delete[] d_metrix;
}
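A short usage sketch for the hand-written filters above (the file name and parameter values are only illustrative):

void my_filters_demo(void)
{
	cv::Mat src = cv::imread("input.jpg");   // illustrative file name
	if (src.empty())
		return;
	cv::Mat dst_mean, dst_median, dst_bilateral;
	lmt_main_blur(src, dst_mean, 2);                          // mean filter, half-width 2 (4x4 window)
	lmt_median_blur(src, dst_median, 2);                      // median filter, half-width 2 (3-channel input)
	lmt_bilateral_filter(src, dst_bilateral, 5, 3.0, 30.0);   // radius 5, sigma_d = 3, sigma_r = 30
}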

OpenCV API implementation:

Brief overview of the relevant OpenCV functions:

Bilateral filter function: bilateralFilter(InputArray src, OutputArray dst, int d, double sigmaColor, double sigmaSpace, int borderType=BORDER_DEFAULT)

   src: the image to be filtered

   dst: the filtered image

   d: diameter of the pixel neighborhood used during filtering

   sigmaColor: sigma of the filter in the color (range) domain

   sigmaSpace: sigma of the filter in the spatial domain

   borderType: border padding mode, e.g. BORDER_REPLICATE, BORDER_REFLECT, BORDER_DEFAULT, BORDER_REFLECT_101, BORDER_TRANSPARENT, BORDER_ISOLATED

 

Mean filter function: blur(InputArray src, OutputArray dst, Size ksize, Point anchor=Point(-1,-1), int borderType=BORDER_DEFAULT)

   src: the image to be filtered

   dst: the filtered image

   ksize: size of the mean (box) kernel

   anchor: anchor point of the kernel, i.e., where the filtered pixel sits within the kernel; Point(-1,-1) means the kernel center

   borderType: border padding mode, e.g. BORDER_REPLICATE, BORDER_REFLECT, BORDER_DEFAULT, BORDER_REFLECT_101, BORDER_TRANSPARENT, BORDER_ISOLATED

 

Gaussian filter function: GaussianBlur(InputArray src, OutputArray dst, Size ksize, double sigmaX, double sigmaY=0, int borderType=BORDER_DEFAULT)

   src: the image to be filtered

   dst: the filtered image

   ksize: size of the Gaussian kernel

   sigmaX: Gaussian sigma in the x direction

   sigmaY: Gaussian sigma in the y direction (0 means it is set equal to sigmaX)

   borderType: border padding mode, e.g. BORDER_REPLICATE, BORDER_REFLECT, BORDER_DEFAULT, BORDER_REFLECT_101, BORDER_TRANSPARENT, BORDER_ISOLATED

 

Median filter function: medianBlur(InputArray src, OutputArray dst, int ksize)

    src: the image to be filtered

    dst: the filtered image

    ksize: aperture size of the median filter (must be odd and greater than 1)

Demo:

void bilateral_filter_show(void)
{
	cv::Mat mat1 = cv::imread("F:\\CVlibrary\\obama.jpg", cv::IMREAD_GRAYSCALE); // load as a grayscale image
	if (mat1.empty())
		return;
	cv::imshow("原图像", mat1); 
	cv::Mat src = cv::imread("F:\\CVlibrary\\obama.jpg");
	cv::imshow("原始彩色图像", src);
	std::cout << "channel = " << mat1.channels() << std::endl;
	
	cv::Mat mat3;
	cv::bilateralFilter(src, mat3, 5, 50, 50,cv::BORDER_DEFAULT);
	cv::imshow("opencv给出的双边滤波器", mat3);
	cv::Mat mat4;
	cv::blur(src, mat4, cv::Size(3, 3));
	cv::imshow("均值滤波", mat4);
	cv::Mat mat5;
	cv::GaussianBlur(src, mat5, cv::Size(5, 5), 1,1);
	cv::imshow("高斯滤波器", mat5);
	cv::Mat mat6;
	cv::medianBlur(src, mat6, 3);
	cv::imshow("中值滤波", mat6); 
	cv::Mat mat7;
	lmt_gaussian_blur(src, mat7, cv::Size(5, 5));
	cv::imshow("my gaussian image",mat7);

	cv::waitKey(0);
}

Copyright notice: this is an original article by liumantang, released under the CC 4.0 BY-SA license; please include the original link and this notice when reposting.
Original link: https://www.cnblogs.com/liumantang/p/11830386.html