Vec3b对应三通道的顺序是blue、green、red的uchar类型数据。
Vec3f对应三通道的float类型数据
把CV_8UC1(uchar)转换到CV_32FC1(float)的实现: src.convertTo(dst, CV_32F);
VS (Visual Studio) C++ 代码
#include "../common/common.hpp"
void main1_5(int argc, char ** argv)
{
Mat src = imread(getCVImagesPath("images/test1_3.png"), IMREAD_COLOR);
namedWindow("input", CV_WINDOW_AUTOSIZE);
imshow("input", src);
Mat gray;
cvtColor(src, gray, COLOR_BGR2GRAY);
imshow("output1", gray);
Mat gray_cha;
bitwise_not(gray, gray_cha);//颜色值取反,同下面算法一致,这个是通过位操作 与或非 实现的
imshow("output2", gray_cha);
int height = gray.rows;
int width = gray.cols;
for (int row = 0; row < height; row++)
{
for (int col = 0; col < width; col++)
{
int pixel = gray.at<uchar>(row, col);//读取图像中 row,col 位置的像素(颜色数据由单通道组成),所以 <> 中用uchar,只返回的是一个uchar值
gray.at<uchar>(row, col) = 255 - pixel;//颜色值取反,灰度图的单通道也是一个字节大小,所以值在0-255之间
}
}
imshow("output3", gray);
Mat dst_cha;
bitwise_not(src, dst_cha);//颜色值取反,同下面算法一致
imshow("output4", dst_cha);
Mat dst;
dst.create(src.size(), src.type());
height = src.rows;
width = src.cols;
int nc = src.channels();
Mat convert;
src.convertTo(convert, CV_32F);//把CV_8UC1(uchar)转换到CV32F1(float), Vec3b对应三通道的顺序是blue、green、red的uchar类型数据。Vec3f对应三通道的float类型数据
for (int row = 0; row < height; row++)
{
for (int col = 0; col < width; col++)
{
int b = src.at<Vec3b>(row, col)[0];//读取图像中 row,col 位置的像素(颜色数据由三通道组成),src是三通道的RGB,所以 <> 传的是Vec3b,返回的是长度3的uchar数组
int g = src.at<Vec3b>(row, col)[1];
int r = src.at<Vec3b>(row, col)[2];
float f_b = convert.at<Vec3f>(row, col)[0];//Vec3f
float f_g = convert.at<Vec3f>(row, col)[1];
float f_r = convert.at<Vec3f>(row, col)[2];
dst.at<Vec3b>(row, col)[0] = 255 - b;//颜色值取反,若分别将 b g r 通道颜色设为0,图片整体颜色为 偏黄、偏绯红、偏青 色
dst.at<Vec3b>(row, col)[1] = 255 - g;
dst.at<Vec3b>(row, col)[2] = 255 - r;
gray.at<uchar>(row, col) = min(b, min(g, r));//随意设置的灰度图,取max图像会比取min亮
}
}
imshow("output5", dst);
imshow("output6", gray);
waitKey(0);
}
Android (Java) 代码
// ButterKnife view bindings: the input preview plus one ImageView per result stage.
@BindView(R.id.iv_opencv1_5_input) ImageView mInputIv;
@BindView(R.id.iv_opencv1_5_output1) ImageView mGrayIv;
@BindView(R.id.iv_opencv1_5_output2) ImageView mGrayFan1Iv;
@BindView(R.id.iv_opencv1_5_output3) ImageView mGrayFan2Iv;
@BindView(R.id.iv_opencv1_5_output4) ImageView mRgbFan1Iv;
@BindView(R.id.iv_opencv1_5_output5) ImageView mRgbFan2Iv;
@BindView(R.id.iv_opencv1_5_output6) ImageView mGrayMinIv;
@BindView(R.id.iv_opencv1_5_output7) ImageView mConvertIv;
// Destination bitmaps; all but mInputBmp are allocated in initBitmaps().
private Bitmap mInputBmp;
private Bitmap mGrayBmp;
private Bitmap mGrayFan1Bmp;
private Bitmap mGrayFan2Bmp;
private Bitmap mRgbFan1Bmp;
private Bitmap mRgbFan2Bmp;
private Bitmap mGrayMinBmp;
private Bitmap mConvertBmp;
// Mats used as OpenCV-call outputs are pre-constructed with new Mat();
// the ones filled by hand-written loops are sized later in onCreate().
private Mat mInputMat = new Mat();
private Mat mGrayMat = new Mat();
private Mat mGrayFan1Mat = new Mat();
private Mat mGrayFan2Mat;
private Mat mRgbFan1Mat = new Mat();
private Mat mRgbFan2Mat;
private Mat mGrayMinMat;
private Mat mConvertMat = new Mat();
/**
 * Builds and displays seven variants of the input image: grayscale, inverted
 * gray (bitwise_not and manual loop), inverted color (bitwise_not and manual
 * loop), a min-channel grayscale, and a float-converted copy with the blue
 * channel zeroed. The manual loops exist to compare their timing against the
 * native OpenCV calls (see the LogUtils lines).
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_cv1_5);
    mUnbinder = ButterKnife.bind(this);
    //input
    mInputBmp = CV310Utils.getBitmapFromAssets(this, "opencv/test1_3.png");
    mInputIv.setImageBitmap(mInputBmp);
    initBitmaps(mInputBmp);
    //gray -- bitmapToMat produces an RGBA (CV_8UC4) mat, so the RGBA->GRAY code
    //must be used; COLOR_BGR2GRAY would apply the R/B luminance weights to the
    //wrong channels.
    Utils.bitmapToMat(mInputBmp, mInputMat);
    Imgproc.cvtColor(mInputMat, mGrayMat, Imgproc.COLOR_RGBA2GRAY);
    Utils.matToBitmap(mGrayMat, mGrayBmp);
    mGrayIv.setImageBitmap(mGrayBmp);
    //gray fan1 -- invert via bitwise_not: a bit-level NOT, equivalent to the
    //255 - v loop below
    long start = System.currentTimeMillis();
    Core.bitwise_not(mGrayMat, mGrayFan1Mat);
    LogUtils.d("mydebug---", "gray fan1 : "+(System.currentTimeMillis()-start));// ~0 ms
    Utils.matToBitmap(mGrayFan1Mat, mGrayFan1Bmp);
    mGrayFan1Iv.setImageBitmap(mGrayFan1Bmp);
    //gray fan2 -- invert again manually, pixel by pixel
    mGrayFan2Mat = new Mat(mGrayMat.size(), mGrayMat.type());
    int height = mGrayMat.rows();
    int width = mGrayMat.cols();
    //single uchar channel: Mat.get needs a byte[], not an int[]; the buffer is
    //hoisted out of the loop to avoid one allocation per pixel
    byte[] pixel = new byte[1];
    start = System.currentTimeMillis();
    for (int row = 0; row < height; row++) {
        for (int col = 0; col < width; col++) {
            mGrayMat.get(row, col, pixel);
            //invert; the helper widens the signed byte to its unsigned value
            //(0-255) before the subtraction
            pixel[0] = (byte) (255 - CV310Utils.byte2unsignedchar(pixel[0]));
            mGrayFan2Mat.put(row, col, pixel);
        }
    }
    LogUtils.d("mydebug---", "gray fan2 : "+(System.currentTimeMillis()-start));// ~1200 ms
    Utils.matToBitmap(mGrayFan2Mat, mGrayFan2Bmp);
    mGrayFan2Iv.setImageBitmap(mGrayFan2Bmp);
    //Rgb fan1 -- invert the color image via bitwise_not
    start = System.currentTimeMillis();
    Core.bitwise_not(mInputMat, mRgbFan1Mat);
    LogUtils.d("mydebug---", "Rgb fan1 : "+(System.currentTimeMillis()-start));// ~4 ms
    Utils.matToBitmap(mRgbFan1Mat, mRgbFan1Bmp);
    mRgbFan1Iv.setImageBitmap(mRgbFan1Bmp);
    //Rgb fan2 -- invert manually; the same loop also builds a min-channel gray
    //image and edits the float copy
    mRgbFan2Mat = new Mat(mInputMat.size(), mInputMat.type());
    mGrayMinMat = new Mat(mGrayMat.size(), mGrayMat.type());
    height = mInputMat.rows();
    width = mInputMat.cols();
    //convert -- convertTo changes depth only (CV_8UC4 uchar -> CV_32FC4 float);
    //the channel count is preserved
    mInputMat.convertTo(mConvertMat, CvType.CV_32F);
    byte[] rgba = new byte[4];     // mInputMat is 4-channel RGBA; alpha is honored
    float[] f_rgba = new float[4]; // float view of the same pixel in mConvertMat
    start = System.currentTimeMillis();
    for (int row = 0; row < height; row++) {
        for (int col = 0; col < width; col++) {
            mInputMat.get(row, col, rgba);// four uchar values: r, g, b, a
            byte r = rgba[0];
            byte g = rgba[1];
            byte b = rgba[2];
            //invert each color channel, leaving alpha untouched
            rgba[0] = (byte) (255 - CV310Utils.byte2unsignedchar(rgba[0]));
            rgba[1] = (byte) (255 - CV310Utils.byte2unsignedchar(rgba[1]));
            rgba[2] = (byte) (255 - CV310Utils.byte2unsignedchar(rgba[2]));
            mRgbFan2Mat.put(row, col, rgba);
            //gray min -- ad-hoc grayscale from the darkest channel (max would
            //give a brighter image); compare as unsigned chars, not signed bytes
            pixel[0] = (byte) Math.min(CV310Utils.byte2unsignedchar(r), Math.min(CV310Utils.byte2unsignedchar(g), CV310Utils.byte2unsignedchar(b)));
            mGrayMinMat.put(row, col, pixel);
            //zero the blue channel (index 2 in RGBA order) of the float copy;
            //zeroing b/g/r tints the image yellow/magenta/cyan respectively
            mConvertMat.get(row, col, f_rgba);
            f_rgba[2] = 0;
            mConvertMat.put(row, col, f_rgba);
        }
    }
    LogUtils.d("mydebug---", "Rgb fan2 : "+(System.currentTimeMillis()-start));// ~1500 ms; close to 4 s for the whole method
    Utils.matToBitmap(mRgbFan2Mat, mRgbFan2Bmp);
    mRgbFan2Iv.setImageBitmap(mRgbFan2Bmp);
    Utils.matToBitmap(mGrayMinMat, mGrayMinBmp);
    mGrayMinIv.setImageBitmap(mGrayMinBmp);
    //convert -- matToBitmap only accepts CV_8UC1 / CV_8UC3 / CV_8UC4 mats, so
    //the CV_32F mat must be converted back to 8-bit before display
    Mat mat = new Mat();
    mConvertMat.convertTo(mat, CvType.CV_8UC1);
    Utils.matToBitmap(mat, mConvertBmp);
    mConvertIv.setImageBitmap(mConvertBmp);
}
/* Pre-allocates every output bitmap at the input image's size, in RGB_565. */
private void initBitmaps(Bitmap bmp){
    final int w = bmp.getWidth();
    final int h = bmp.getHeight();
    mGrayBmp = blankRgb565(w, h);
    mGrayFan1Bmp = blankRgb565(w, h);
    mGrayFan2Bmp = blankRgb565(w, h);
    mRgbFan1Bmp = blankRgb565(w, h);
    mRgbFan2Bmp = blankRgb565(w, h);
    mGrayMinBmp = blankRgb565(w, h);
    mConvertBmp = blankRgb565(w, h);
}
/* Creates an empty RGB_565 bitmap with the given dimensions. */
private Bitmap blankRgb565(int w, int h){
    return Bitmap.createBitmap(w, h, Bitmap.Config.RGB_565);
}
效果图