Answer Sheet Recognition with OpenCV in Java

Download OpenCV

https://github.com/opencv/opencv/releases

Download the release package from that page. The .exe also works; it is a self-extracting archive.

When choosing the installation directory, there is no need to create an opencv folder yourself; one is created automatically during extraction.

Add the dependency

Add the jar to your project.

The jar is located in the build\java directory under the installation directory.

Either of the two approaches below works.

Approach 1 is recommended; approach 2 stops working on Linux, because it loads a Windows .dll by name.

Approach 1

Add test code:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class Test01 {
  static {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
  }

  public static void main(String[] args) {
    System.out.println("Welcome to OpenCV " + Core.VERSION);
    Mat m = new Mat(5, 10, CvType.CV_8UC1, new Scalar(0));
    System.out.println("OpenCV Mat: " + m);
    Mat mr1 = m.row(1);
    mr1.setTo(new Scalar(1));
    Mat mc5 = m.col(5);
    mc5.setTo(new Scalar(5));
    System.out.println("OpenCV Mat data:\n" + m.dump());
  }
}

Configure the run-time VM options.

Open the Run/Debug Configurations dialog via the menu Run -> Edit Configurations....

On the right side of the dialog, find the text box labeled VM options.

Enter the following parameter in the text box:

-Djava.library.path=D:\Tools\opencv\build\java\x64

Use x64 if your JVM is 64-bit and x86 if it is 32-bit.

Alternatively, you can copy opencv_java455.dll directly into the JDK's bin directory, in which case java.library.path does not need to be configured:

D:\Tools\Java\jdk1.8.0_102\bin
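
If you are unsure which variant to pick, the JVM's os.arch property settles it. A minimal check (the values in the comment are typical examples, not guaranteed):

public class ArchCheck {
  public static void main(String[] args) {
    // Typically prints "amd64"/"x86_64" on a 64-bit JVM (use x64) and "x86"/"i386" on 32-bit (use x86)
    System.out.println(System.getProperty("os.arch"));
  }
}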

Approach 2

With this approach you no longer need to specify java.library.path.

Put the jar and the dll into the project (the code below expects the dll under lib/opencv).

In the code:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import java.net.URL;

public class Test01 {
  static {
    // Load the native library from the project's resources; java.library.path is not needed
    URL url = ClassLoader.getSystemResource("lib/opencv/opencv_java455.dll");
    System.load(url.getPath());
  }

  public static void main(String[] args) {
    System.out.println("Welcome to OpenCV " + Core.VERSION);
    Mat m = new Mat(5, 10, CvType.CV_8UC1, new Scalar(0));
    System.out.println("OpenCV Mat: " + m);
    Mat mr1 = m.row(1);
    mr1.setTo(new Scalar(1));
    Mat mc5 = m.col(5);
    mc5.setTo(new Scalar(5));
    System.out.println("OpenCV Mat data:\n" + m.dump());
  }
}
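
The hard-coded opencv_java455.dll is exactly why approach 2 breaks on Linux. Below is a minimal cross-platform sketch, assuming the Linux bindings (a file such as libopencv_java455.so, an assumption here) are also placed under lib/opencv in the project:

import java.net.URL;

public class NativeLoader {
  // Assumed resource layout: lib/opencv/opencv_java455.dll (Windows) and lib/opencv/libopencv_java455.so (Linux)
  public static void load() {
    String os = System.getProperty("os.name").toLowerCase();
    String lib = os.contains("win") ? "lib/opencv/opencv_java455.dll"
                                    : "lib/opencv/libopencv_java455.so";
    URL url = ClassLoader.getSystemResource(lib);
    if (url == null) {
      throw new UnsatisfiedLinkError("Native library not found on the classpath: " + lib);
    }
    System.load(url.getPath());
  }
}

As with the original snippet, System.load needs a real file on disk, so this works when the resources live on the file system rather than packed inside a jar.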

Common operations

import org.opencv.core.*;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import utils.opencv.OpenCVUtil;


public class Test01 {
    static {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    }

    public static void main(String[] args) {
        // Read the image as grayscale
        Mat img = Imgcodecs.imread("D:\\Pic\\0.png", Imgcodecs.IMREAD_GRAYSCALE);
        Imgcodecs.imwrite("D:\\Pic\\1.png", img);
        
        // Convert to a binary image (Otsu picks the threshold automatically)
        Mat img2 = new Mat();
        Imgproc.threshold(img, img2, 0, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);
        Imgcodecs.imwrite("D:\\Pic\\2.png", img2);
        
        // "Dilate" the black marks by eroding the white background
        Mat img3 = OpenCVUtil.eroding(img2);
        Imgcodecs.imwrite("D:\\Pic\\3.png", img3);


    }
}

The steps used above, each also shown individually below, are:

  1. Converting the image to grayscale
  2. Binarizing the grayscale image
  3. Dilating the black regions of the binary image
  4. Cropping the image

Grayscale

// Read the image as grayscale
Mat img = Imgcodecs.imread("D:\\Pic\\0.png", Imgcodecs.IMREAD_GRAYSCALE);
Imgcodecs.imwrite("D:\\Pic\\1.png", img);

Binarization

// Convert to a binary image (Otsu picks the threshold automatically)
Mat img2 = new Mat();
Imgproc.threshold(img, img2, 0, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);
Imgcodecs.imwrite("D:\\Pic\\2.png", img2);

Dilation (of the black regions)

// "Dilate" the black marks by eroding the white background
Mat img3 = OpenCVUtil.eroding(img2);
Imgcodecs.imwrite("D:\\Pic\\3.png", img3);

Gaussian blur

Mat img01 = new Mat();
// The kernel size must be odd; a 1x1 kernel leaves the image unchanged, so use e.g. 5x5
Imgproc.GaussianBlur(img, img01, new Size(5, 5), 10, 10);
Imgcodecs.imwrite("D:\\Pic\\img01.png", img01);

Cropping

// Crop the top-left quarter (the sub-Mat is a view that shares data with img2)
Rect rect = new Rect(0, 0, img2.cols() / 2, img2.rows() / 2);
Mat img4 = new Mat(img2, rect);
Imgcodecs.imwrite("D:\\Pic\\4.png", img4);

Utility classes

General-purpose utilities

package utils.opencv;

import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.io.IOException;
import java.util.*;

import org.opencv.core.*;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class OpenCVUtil {
  public static BufferedImage covertMat2Buffer(Mat mat) throws IOException {
    long time1 = new Date().getTime();
    // Convert the Mat to a BufferedImage
    BufferedImage originalB = toBufferedImage(mat);
    long time3 = new Date().getTime();
    System.out.println("Mat -> BufferedImage took " + (time3 - time1) + " ms");
    return originalB;
    // ImageIO.write(originalB, "jpg", new File("D:\\test\\testImge\\ws2.jpg"));
  }

  public static byte[] covertMat2Byte(Mat mat) throws IOException {
    long time1 = new Date().getTime();
    // Copy the Mat's pixels into a byte array
    byte[] return_buff = new byte[(int) (mat.total() * mat.channels())];
    mat.get(0, 0, return_buff);
    long time3 = new Date().getTime();
    System.out.println(mat.total() * mat.channels());
    System.out.println("Mat -> byte[] took " + (time3 - time1) + " ms");
    return return_buff;
  }

  public static byte[] covertMat2Byte1(Mat mat) throws IOException {
    long time1 = new Date().getTime();
    MatOfByte mob = new MatOfByte();

    Imgcodecs.imencode(".jpg", mat, mob);

    long time3 = new Date().getTime();
    // System.out.println(mat.total() * mat.channels());
    System.out.println("Mat转byte[] 耗时=" + (time3 - time1));
    return mob.toArray();
  }

  public static BufferedImage toBufferedImage(Mat m) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (m.channels() > 1) {
      type = BufferedImage.TYPE_3BYTE_BGR;
    }
    int bufferSize = m.channels() * m.cols() * m.rows();
    byte[] b = new byte[bufferSize];
    m.get(0, 0, b); // get all the pixels
    BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(b, 0, targetPixels, 0, b.length);
    return image;
  }

  /**
   * Erosion and dilation are defined with respect to the white regions;
   * erosion shrinks the white regions.
   * Erosion (the black regions grow).
   *
   * @param source
   * @return
   */
  public static Mat eroding(Mat source) {
    return eroding(source, 1);
  }

  public static Mat eroding(Mat source, double erosion_size) {
    Mat resultMat = new Mat(source.rows(), source.cols(), source.type());
    Mat element = Imgproc.getStructuringElement(
      Imgproc.MORPH_RECT,
      new Size(erosion_size + 1, erosion_size + 1)
    );
    Imgproc.erode(source, resultMat, element);
    return resultMat;
  }

  /**
   * Erosion and dilation are defined with respect to the white regions;
   * dilation grows the white regions.
   * Dilation (the white regions grow).
   *
   * @param source
   * @return
   */
  public static Mat dilation(Mat source) {
    return dilation(source, 1);
  }

  /**
   * Erosion and dilation are defined with respect to the white regions;
   * dilation grows the white regions.
   *
   * @param source
   * @param dilation_size the x in the kernel size 2*x+1
   * @return Mat
   */
  public static Mat dilation(Mat source, double dilation_size) {
    Mat resultMat = new Mat(source.rows(), source.cols(), source.type());
    Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2 * dilation_size + 1,
                                                                             2 * dilation_size + 1));
    Imgproc.dilate(source, resultMat, element);
    return resultMat;
  }
}

Perspective transform

package utils.opencv;

import org.opencv.core.*;
import org.opencv.imgproc.Imgproc;
import org.opencv.utils.Converters;

import java.util.Arrays;
import java.util.List;

/**
 * Perspective transform utility class.
 * My perspective transform is not that great either, so this only provides a rough function...
 */
public class WarpPerspectiveUtils {

  /**
   * Perspective transform
   *
   * @param src
   * @param points
   * @return
   */
  public static Mat warpPerspective(Mat src, Point[] points) {
    // Point order: [top-left, top-right, bottom-right, bottom-left]
    List<Point> listSrcs = Arrays.asList(
      points[0],
      points[1],
      points[2],
      points[3]
    );
    Mat srcPoints = Converters.vector_Point_to_Mat(listSrcs, CvType.CV_32F);

    List<Point> listDsts = Arrays.asList(
      new Point(0, 0),
      new Point(src.width(), 0),
      new Point(src.width(), src.height()),
      new Point(0, src.height())
    );


    Mat dstPoints = Converters.vector_Point_to_Mat(listDsts, CvType.CV_32F);

    // Note the reversed argument order: because WARP_INVERSE_MAP is passed below,
    // warpPerspective expects the matrix that maps destination coordinates back to the source.
    Mat perspectiveMmat = Imgproc.getPerspectiveTransform(dstPoints, srcPoints);

    Mat dst = new Mat();

    Imgproc.warpPerspective(
      src,
      dst,
      perspectiveMmat,
      src.size(),
      Imgproc.INTER_LINEAR + Imgproc.WARP_INVERSE_MAP,
      1,
      new Scalar(0)
    );

    return dst;
  }
}

Contour utilities

package utils.opencv;

import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.imgproc.Imgproc;

import java.util.Vector;

/**
 * Contour utility class
 */
public class ContoursUtils {

  /**
   * Find the four corner points of the binary image by scanning for the black pixel
   * closest to each of the four image corners, skipping the central third of the image.
   *
   * @param img
   * @return the corner points in the order [top-left, top-right, bottom-right, bottom-left]
   */
  public static Point[] getAllPoints(Mat img) {
    Point[] potArr = new Point[4];
    for (int i = 0; i < 4; i++) {
      potArr[i] = new Point(-1, -1);
    }

    // spaceArr[i] holds the current minimum distance from image corner i to a black pixel
    int[] spaceArr = new int[]{-1, -1, -1, -1};
    int cols = img.cols();
    int rows = img.rows();
    // Skip the central third of the image in both directions; the corner marks lie near the edges
    int x1 = cols / 3;
    int x2 = cols * 2 / 3;
    int y1 = rows / 3;
    int y2 = rows * 2 / 3;
    for (int x = 0; x < cols; x++) {
      for (int y = 0; y < rows; y++) {
        if (x > x1 && x < x2 && y > y1 && y < y2) {
          continue;
        }
        double[] darr = img.get(y, x);
        if (darr != null && darr.length >= 1 && darr[0] == 0) {
          if (spaceArr[0] == -1) {
            potArr[0].x = x;
            potArr[0].y = y;
            potArr[1].x = x;
            potArr[1].y = y;
            potArr[2].x = x;
            potArr[2].y = y;
            potArr[3].x = x;
            potArr[3].y = y;
            spaceArr[0] = getSpace(0, 0, x, y);
            spaceArr[1] = getSpace(cols, 0, x, y);
            spaceArr[2] = getSpace(cols, rows, x, y);
            spaceArr[3] = getSpace(0, rows, x, y);
          } else {
            int s0 = getSpace(0, 0, x, y);
            int s1 = getSpace(cols, 0, x, y);
            int s2 = getSpace(cols, rows, x, y);
            int s3 = getSpace(0, rows, x, y);
            if (s0 < spaceArr[0]) {
              spaceArr[0] = s0;
              potArr[0].x = x;
              potArr[0].y = y;
            }
            if (s1 < spaceArr[1]) {
              spaceArr[1] = s1;
              potArr[1].x = x;
              potArr[1].y = y;
            }
            if (s2 < spaceArr[2]) {
              spaceArr[2] = s2;
              potArr[2].x = x;
              potArr[2].y = y;
            }
            if (s3 < spaceArr[3]) {
              spaceArr[3] = s3;
              potArr[3].x = x;
              potArr[3].y = y;
            }
          }

        }
      }
    }
    return potArr;
  }

  /**
   * Contour detection, using retrieval mode RETR_LIST and approximation method CHAIN_APPROX_SIMPLE.
   *
   * @param source the input image Mat
   * @return the detected contours
   */
  public static Vector<MatOfPoint> findContours(Mat source) {
    Mat rs = new Mat();
    /**
     * Contour retrieval modes:
     * RETR_EXTERNAL: retrieve only the outermost contours;
     * RETR_LIST: retrieve all contours and put them into a flat list;
     * RETR_CCOMP: retrieve all contours and organize them into two levels: the top level is the
     *             outer boundary of each component, the second level is the boundaries of the holes;
     * RETR_TREE: retrieve all contours and reconstruct the full hierarchy of nested contours.
     */
    /**
     * Contour approximation methods:
     * CHAIN_APPROX_NONE: store every contour point;
     * CHAIN_APPROX_SIMPLE: compress horizontal, vertical and diagonal segments, keeping only their end points;
     * CHAIN_APPROX_TC89_L1, CHAIN_APPROX_TC89_KCOS: use one of the flavors of the Teh-Chin chain approximation algorithm.
     */
    Vector<MatOfPoint> contours = new Vector<MatOfPoint>();
    Imgproc.findContours(
      source,
      contours,
      rs,
      Imgproc.RETR_LIST,
      Imgproc.CHAIN_APPROX_SIMPLE
    );
    return contours;
  }

  /**
   * Compute the distance between two points
   *
   * @param x1
   * @param y1
   * @param x2
   * @param y2
   * @return
   */
  private static int getSpace(int x1, int y1, int x2, int y2) {
    int xspace = Math.abs(x1 - x2);
    int yspace = Math.abs(y1 - y2);
    return (int) Math.sqrt(Math.pow(xspace, 2) + Math.pow(yspace, 2));
  }
}

Full processing pipeline

import org.opencv.core.*;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import utils.opencv.ContoursUtils;
import utils.opencv.OpenCVUtil;
import utils.opencv.WarpPerspectiveUtils;

import java.util.Vector;


public class Test01 {
  static {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
  }

  public static void main(String[] args) {
    // Read the image as grayscale
    Mat img = Imgcodecs.imread("D:\\Project\\Java\\opencv-demo01\\pic\\0.jpg", Imgcodecs.IMREAD_GRAYSCALE);
    Imgcodecs.imwrite("D:\\Project\\Java\\opencv-demo01\\pic\\1.png", img);

    // Convert to a binary image
    Mat img2 = new Mat();
    Imgproc.threshold(img, img2, 0, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);
    Imgcodecs.imwrite("D:\\Project\\Java\\opencv-demo01\\pic\\2.png", img2);

    // Perspective transform (deskew using the four detected corner points)
    Mat img3 = WarpPerspectiveUtils.warpPerspective(img2, ContoursUtils.getAllPoints(img2));
    Imgcodecs.imwrite("D:\\Project\\Java\\opencv-demo01\\pic\\3.png", img3);

    // "Dilate" the black marks by eroding the white background
    Mat img4 = OpenCVUtil.eroding(img3);
    Imgcodecs.imwrite("D:\\Project\\Java\\opencv-demo01\\pic\\4.png", img4);

    // Crop the multiple-choice answer region (coordinates are specific to this sample sheet)
    Rect rect = new Rect(68, 834, 1536, 220);
    Mat img5 = new Mat(img4, rect);
    Imgcodecs.imwrite("D:\\Project\\Java\\opencv-demo01\\pic\\5.png", img5);

    // Find contours and keep those whose bounding boxes match the size of an answer bubble
    Vector<MatOfPoint> rectVec = ContoursUtils.findContours(img5);
    Vector<MatOfPoint> rectVec2 = new Vector<>();
    for (MatOfPoint matOfPoint : rectVec) {
      Rect rect2 = Imgproc.boundingRect(matOfPoint);
      if (rect2.width > 36 && rect2.height > 20 && rect2.width < 50 && rect2.height < 40) {
        rectVec2.add(matOfPoint);
      }
    }
    Mat img6 = new Mat(img5.rows(), img5.cols(), CvType.CV_8UC3, new Scalar(255, 255, 255));
    Imgproc.drawContours(img6, rectVec2, -1, new Scalar(0, 0, 255), 1);
    Imgcodecs.imwrite("D:\\Project\\Java\\opencv-demo01\\pic\\6.png", img6);
  }
}
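
The pipeline above stops at drawing the candidate answer bubbles. As a rough sketch of the remaining step, the class below maps each detected bounding box to a question and option by dividing the cropped answer region into a uniform grid. The grid layout (20 question columns, options A-D stacked vertically) and the class name AnswerMapper are assumptions for illustration, not part of the original article.

import org.opencv.core.Rect;
import java.util.List;

public class AnswerMapper {
  // Assumed layout: the cropped region holds 20 question columns and 4 option rows (A-D)
  public static void mapBubbles(List<Rect> bubbles, int regionWidth, int regionHeight) {
    int questions = 20;
    int options = 4;
    double cellW = regionWidth / (double) questions;
    double cellH = regionHeight / (double) options;
    for (Rect r : bubbles) {
      // Use the bubble's center to decide which grid cell it falls into
      double cx = r.x + r.width / 2.0;
      double cy = r.y + r.height / 2.0;
      int question = Math.min(questions, (int) (cx / cellW) + 1);
      char option = (char) ('A' + Math.min(options - 1, (int) (cy / cellH)));
      System.out.println("Question " + question + " -> " + option);
    }
  }
}

With the pipeline above, the inputs would be the bounding rectangles obtained from rectVec2 via Imgproc.boundingRect, together with img5.cols() and img5.rows().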