17 changes: 10 additions & 7 deletions README.md
@@ -57,6 +57,7 @@ requires importing log dependencies, which are essential. Please import the depe
<!--<scope>provided</scope>-->
</dependency>
<!-- Log4j2 logging implementation -->

<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
@@ -81,12 +82,14 @@ need to, you can refer to the following configuration.
<artifactId>mysql-connector-java</artifactId>
<version>8.0.30</version>
</dependency>

<!-- The dependencies for the three major Spark modules can also be imported here if you need them; if not, they can be omitted -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.12</artifactId>
<version>3.1.3</version>
</dependency>

<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.12</artifactId>
@@ -106,13 +109,13 @@ need to, you can refer to the following configuration.
<version>0.3.12</version>
</dependency>

<!-- HDFS input/output device dependency. Introduce this library if you need to read and write data through the HDFS distributed storage platform. -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>3.3.1</version>
</dependency>
<!-- HDFS input/output device dependency. Introduce this library if you need to read and write data through the HDFS distributed storage platform. -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>3.3.1</version>
</dependency>

</dependencies>
```
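
If you want to confirm that the logging dependencies above are picked up, a minimal check along the following lines can help. This is only a sketch: it assumes the library emits its logs through Log4j2, as the dependency list above implies, and the class name `LogCheck` and the log message are purely illustrative.

```java
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

// Minimal sketch: confirm that log4j-api / log4j-core resolve and a logger can be obtained.
// The class name and the message are illustrative only.
public class LogCheck {
    public static void main(String[] args) {
        Logger logger = LogManager.getLogger(LogCheck.class);
        // If the dependencies above are on the classpath, this message goes through the configured appenders.
        logger.info("Log4j2 is available; library logging should be visible.");
    }
}
```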

104 changes: 59 additions & 45 deletions src_code/Case.md
@@ -112,7 +112,11 @@ public class MAIN1 {

### Image template matching

- Face recognition: compute the Manhattan similarity coefficient through face-template matching
- Face recognition

The Manhattan similarity coefficient is computed through face-template matching, and the face recognition result is derived from it. Here we use the image below as the sample; note that the sample's outline is black, which makes it suitable for face recognition under backlit conditions.

![YB](https://user-images.githubusercontent.com/113756063/230775389-4477aad4-795c-47c2-a946-0afeadafad44.jpg)

```java
package zhao.algorithmMagic;
@@ -173,7 +177,8 @@ import java.util.Map;
public class MAIN1 {
public static void main(String[] args) {
ColorMatrix colorMatrix1, colorMatrix2;
{ // Read the image and the sample
{
// Read the image and the sample
colorMatrix1 = ColorMatrix.parse("C:\\Users\\zhao\\Desktop\\fsdownload\\YB.bmp");
colorMatrix2 = ColorMatrix.parse("C:\\Users\\zhao\\Desktop\\fsdownload\\test22.jpg");
ColorMatrix temp = ColorMatrix.parse(colorMatrix2.copyToNewArrays());
Expand Down Expand Up @@ -216,7 +221,7 @@ public class MAIN1 {
```

```java
package zhao.algorithmMagic;
package zhao.run;

import zhao.algorithmMagic.algorithm.distanceAlgorithm.ManhattanDistance;
import zhao.algorithmMagic.io.InputCamera;
@@ -227,50 +232,59 @@ import zhao.algorithmMagic.operands.matrix.ColorMatrix;
import zhao.algorithmMagic.operands.table.FinalCell;

import java.awt.*;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Map;

public class MAIN1 {
public static void main(String[] args) {
ColorMatrix colorMatrix1, colorMatrix2;
{
// Obtain the camera input device
InputComponent inputComponent = InputCamera.builder()
// The name or index of the camera to use, or "def" for the default; here we use "def", which selects the default camera
.addInputArg(InputCameraBuilder.Camera_Index, new FinalCell<>("def"))
// The image format to capture
.addInputArg(InputCameraBuilder.Image_Format, new FinalCell<>("JPG"))
// The image size; the value here is the VGA field of the WebcamResolution enum
.addInputArg(InputCameraBuilder.CUSTOM_VIEW_SIZES, new FinalCell<>("VGA"))
.create();
// Read the image and the sample
colorMatrix1 = ColorMatrix.parse("C:\\Users\\liming\\Desktop\\fsdownload\\YB.bmp");
colorMatrix2 = ColorMatrix.parse(inputComponent);
ColorMatrix temp = ColorMatrix.parse(colorMatrix2.copyToNewArrays());
// Start binarization
colorMatrix1.localBinary(ColorMatrix._G_, 10, 0xffffff, 0, 1);
temp.localBinary(ColorMatrix._G_, 5, 0xffffff, 0, 20);
temp.erode(2, 2, false);
temp.show("temp");
// Start template matching and return the best result; the returned entry's key is the matching coefficient and its value is the match position
Map.Entry<Double, IntegerCoordinateTwo> matching = temp.templateMatching(
ManhattanDistance.getInstance("MAN"),
colorMatrix1,
ColorMatrix._G_,
10,
false
);
// Start drawing; first obtain the coordinate data
IntegerCoordinateTwo coordinateTwo = matching.getValue();
System.out.print("matching coefficient = ");
System.out.println(matching.getKey());
colorMatrix2.drawRectangle(
coordinateTwo,
new IntegerCoordinateTwo(coordinateTwo.getX() + colorMatrix1.getColCount(), coordinateTwo.getY() + colorMatrix1.getRowCount()),
Color.MAGENTA
);
}
colorMatrix1.show("人脸样本");
colorMatrix2.show("识别结果");
public class Test {
public static void main(String[] args) throws MalformedURLException {
// Obtain the camera input-device object
InputComponent inputComponent = InputCamera.builder()
.addInputArg(InputCameraBuilder.Camera_Index, new FinalCell<>(0))
.addInputArg(InputCameraBuilder.Image_Format, new FinalCell<>("JPG"))
.create();
// Parse the face-outline sample to use as the template
ColorMatrix parse = ColorMatrix.parse(
new URL("https://user-images.githubusercontent.com/113756063/230775389-4477aad4-795c-47c2-a946-0afeadafad44.jpg")
);
// Read one image from the camera, load it into an image matrix, and make a backup copy.
ColorMatrix colorMatrix1 = ColorMatrix.parse(inputComponent);
ColorMatrix colorMatrix2 = ColorMatrix.parse(colorMatrix1.copyToNewArrays());
// Next, binarize colorMatrix1 to reduce the influence of color on the result
colorMatrix1.localBinary(
// The color channel used for the binarization computation
ColorMatrix._G_,
// Since local binarization is used, the number of sub-matrices must be specified; here it is 10
10,
// The color value to assign when a pixel is above the local threshold
0xffffff,
// The color value to assign when a pixel is below the local threshold
0,
// The bias applied to every local color threshold during binarization
1
);
// Once binarization is done, template matching can begin; it returns the matching coefficient of the most similar sub-matrix together with its top-left coordinate
Map.Entry<Double, IntegerCoordinateTwo> entry = colorMatrix1.templateMatching(
// The algorithm to use for template matching
ManhattanDistance.getInstance("MAN"),
// The template, i.e. the face sample data
parse,
// The color channel used in the computation
ColorMatrix._G_,
// The scan stride; a larger stride is faster, a smaller one is more accurate
12,
// Whether the similarity coefficient is positively correlated with similarity; Manhattan distance is negatively correlated, so pass false
false
);
// Get the top-left coordinate
IntegerCoordinateTwo l = entry.getValue();
// Compute the bottom-right coordinate
IntegerCoordinateTwo r = new IntegerCoordinateTwo(
l.getX() + parse.getColCount(), l.getY() + parse.getRowCount()
);
// Draw onto the backed-up original matrix and display the image
colorMatrix2.drawRectangle(l, r, new Color(255, 0, 255));
colorMatrix2.show("image");
}
}
```
2 changes: 1 addition & 1 deletion src_code/pom.xml
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

<groupId>io.github.BeardedManZhao</groupId>
@@ -107,6 +107,21 @@ final class SparkVector(sparkContext: SparkContext, vector: org.apache.spark.mll
else throw new OperatorOperationException("'DoubleVector1 add DoubleVector2' 时,两个'DoubleVector'的向量所包含的数量不同,DoubleVector1=[" + numberOfDimensions1 + "],DoubleVector2=[" + numberOfDimensions2 + "]\n" + "When 'DoubleVector1 add DoubleVector2', the two vectors of 'DoubleVector' contain different quantities, DoubleVector1=[" + numberOfDimensions1 + "], DoubleVector2=[" + numberOfDimensions2 + "]")
}

/**
*
* @return 将本对象中存储的向量序列数组拷贝到一个新数组并将新数组返回,这里返回的是一个新数组,支持修改等操作。
*
* Copy the vector sequence array stored in this object to a new array and return the new array. Here, a new array is returned, which supports modification and other operations.
*/
override def copyToNewArray(): Array[Double] = vector.toArray

/**
* @return 向量中包含的维度数量
* <p>
* the number of dimensions contained in the vector
*/
override def getNumberOfDimensions: Int = size

/**
* 在两个操作数之间做差的方法,具体用法请参阅API说明。
* <p>
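
To make the two accessors added above concrete, here is a hedged usage sketch. It only exercises `copyToNewArray` and `getNumberOfDimensions` as documented in the diff; the import path for `SparkVector` and the way the instance is obtained are assumptions, since neither appears in this changeset.

```java
// Hedged sketch: the package below is assumed; adjust it to where SparkVector actually lives.
import zhao.algorithmMagic.operands.vector.SparkVector;

public class SparkVectorDemo {
    // How a SparkVector is constructed is out of scope here; the instance is passed in.
    static void inspect(SparkVector sparkVector) {
        // Per the doc comment above, copyToNewArray() returns a new array that is safe to modify.
        double[] copy = sparkVector.copyToNewArray();
        copy[0] = 42.0; // changes only the copy, per the documented contract
        int dims = sparkVector.getNumberOfDimensions();
        System.out.println("dimensions = " + dims + ", first copied value = " + copy[0]);
    }
}
```
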
8 changes: 4 additions & 4 deletions src_code/src/main/resources/log4j2.xml
@@ -15,7 +15,7 @@
<Appenders>
<!-- Console output appender; SYSTEM_OUT prints in black, SYSTEM_ERR prints in red -->
<Console name="Console" target="SYSTEM_OUT">
<PatternLayout pattern="[%p][%c][%d{yy-MM-dd:hh}]] : %m%n" />
<PatternLayout pattern="[%p][%c][%d{yy-MM-dd:hh}]] : %m%n"/>
</Console>

<!-- &lt;!&ndash; Log file output appender &ndash;&gt;-->
@@ -25,7 +25,7 @@

<!-- Log file output appender using a random-access stream, which improves performance -->
<RandomAccessFile name="accessFile" fileName="${LOG_HOME}/algorithmStar.log">
<PatternLayout pattern="[%p][%c][%d{yy-MM-dd:hh:mm:ss}]] : %m%n" />
<PatternLayout pattern="[%p][%c][%d{yy-MM-dd:hh:mm:ss}]] : %m%n"/>
</RandomAccessFile>
</Appenders>

@@ -34,8 +34,8 @@
<!-- Use the rootLogger to configure the log level, level="info" -->
<Root level="info">
<!-- Specify the appenders used by the logger -->
<AppenderRef ref="Console" />
<AppenderRef ref="accessFile" />
<AppenderRef ref="Console"/>
<AppenderRef ref="accessFile"/>
</Root>
</Loggers>
</configuration>