product
chuang 2 years ago
parent 2c05c79baf
commit 0ec6c1f28d

@@ -126,71 +126,7 @@
</dependency>
<!-- Image recognition -->
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
<version>1.4</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>2.17.2</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.5</version>
</dependency>
<!-- Server-side inference engine -->
<dependency>
<groupId>ai.djl</groupId>
<artifactId>api</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>basicdataset</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- MXNet -->
<dependency>
<groupId>ai.djl.mxnet</groupId>
<artifactId>mxnet-model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl.mxnet</groupId>
<artifactId>mxnet-engine</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- PaddlePaddle -->
<dependency>
<groupId>ai.djl.paddlepaddle</groupId>
<artifactId>paddlepaddle-engine</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl.paddlepaddle</groupId>
<artifactId>paddlepaddle-model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv-platform</artifactId>
<version>1.5.7</version>
</dependency>
<dependency>
<groupId>ai.djl.opencv</groupId>
<artifactId>opencv</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>

@@ -1,19 +1,24 @@
package jnpf.collection.controller;
import cn.afterturn.easypoi.excel.ExcelExportUtil;
import cn.afterturn.easypoi.excel.entity.ExportParams;
import cn.afterturn.easypoi.excel.entity.enmus.ExcelType;
import cn.afterturn.easypoi.excel.entity.params.ExcelExportEntity;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import jnpf.base.ActionResult;
import jnpf.base.UserInfo;
import jnpf.base.vo.DownloadVO;
import jnpf.base.vo.PageListVO;
import jnpf.base.vo.PaginationVO;
import jnpf.collection.entity.CollectionEntity;
import jnpf.collection.entity.Collection_item0Entity;
import jnpf.collection.model.collection.*;
import jnpf.collection.service.CollectionService;
import jnpf.collection.service.Collection_item0Service;
import jnpf.config.ConfigValueUtil;
import jnpf.exception.DataException;
import jnpf.poundlist.entity.PoundlistEntity;
@@ -22,46 +27,26 @@ import jnpf.saleorder.entity.SaleorderitemEntity;
import jnpf.saleorder.entity.Salesorder_item0Entity;
import jnpf.saleorder.service.SaleorderitemService;
import jnpf.saleorder.service.Salesorder_item0Service;
import jnpf.base.entity.ProvinceEntity;
import jnpf.base.util.*;
import jnpf.base.vo.ListVO;
import jnpf.collection.entity.*;
import jnpf.util.*;
import jnpf.util.context.SpringContext;
import jnpf.util.enums.FileTypeEnum;
import jnpf.util.file.UploadUtil;
import cn.hutool.core.util.ObjectUtil;
import lombok.Cleanup;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.ibatis.annotations.Param;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.Workbook;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import javax.validation.Valid;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.*;
import java.util.stream.Collectors;
/**
*
* collection

@@ -1,291 +0,0 @@
package jnpf.ocr_sdk;
import ai.djl.ModelException;
import ai.djl.inference.Predictor;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.opencv.OpenCVImageFactory;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.translate.TranslateException;
import jnpf.ocr_sdk.utils.common.ImageUtils;
import jnpf.ocr_sdk.utils.common.RotatedBox;
import jnpf.ocr_sdk.utils.detection.OcrV3Detection;
import jnpf.ocr_sdk.utils.opencv.OpenCVUtils;
import jnpf.ocr_sdk.utils.recognition.OcrV3Recognition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
/**
* OCR V3 text recognition example.
*
* @author Calvin
* @date 2022-10-07
* @email 179209347@qq.com
*/
public final class OcrV3RecognitionExample {
private static final Logger logger = LoggerFactory.getLogger(OcrV3RecognitionExample.class);
private OcrV3RecognitionExample() {
}
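/**
* Matches each recognized box against the given column keywords and, for every match,
* extracts the value of the neighbouring cell selected by the direction code in columnMap
* (see the switch below and the getRight / getDown helpers).
*/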
public static ArrayList<String> getExample(List<RotatedBox> rotatedBoxes,ArrayList<String> columnNames,HashMap<String, Integer> columnMap){
String info="";
ArrayList<String> list = new ArrayList<>();
// recognized content of each detected cell
ArrayList<HashMap<String,Object>> boxInfo = new ArrayList<>();
// coordinates of each recognized box, keyed by its index
HashMap<Integer, HashMap<String, Object>> xyInfo = new HashMap<>();
for (int i = 0; i < rotatedBoxes.size(); i++) {
RotatedBox box = rotatedBoxes.get(i);
HashMap<String, Object> map = new HashMap<>();
map.put("Text",box.getText());
int[] boxXY = OcrV3RecognitionExample.getBoxXY(box.getBox());
map.put("xy",boxXY);
map.put("index",i);
// corner x/y coordinates of this box
HashMap<String, Object> xYMap = new HashMap<>();
xYMap.put("x1",boxXY[0]);
xYMap.put("x2",boxXY[1]);
xYMap.put("y1",boxXY[5]);
xYMap.put("y4",boxXY[8]);
xYMap.put("Text",box.getText());
boxInfo.add(map);
xyInfo.put(i,xYMap);
}
// Use the index to fetch each box's coordinates, match candidate cells by their y information, collect them into an array, then validate by x: the closest x distance is matched first.
for (int i = 0; i < boxInfo.size(); i++) {
HashMap<String, Object> map = boxInfo.get(i);
String text=map.get("Text")!=null?map.get("Text").toString():"null";
for (int i1 = 0; i1 < columnNames.size(); i1++) {
String s = columnNames.get(i1);
if (text.contains(s)){
Integer integer = columnMap.get(s);
switch (integer){
case 1:
list.add(getRight(map,xyInfo,1));
break;
case 2:
list.add(getRight(map,xyInfo,-1));
break;
case -1:
list.add(getDown(map,xyInfo,1));
break;
case -2:
list.add(getDown(map,xyInfo,-1));
break;
case 0:
list.add(getRight(map,xyInfo,0));
break;
default:
break;
}
}
}
}
return list;
}
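/**
* Returns the text of the cell vertically next to the matched cell: candidate boxes are those
* whose x span overlaps the matched box, sorted by their y coordinate; "to" selects the
* neighbour (+1 the cell below, -1 the cell above).
*/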
public static String getDown(HashMap<String, Object> map,HashMap<Integer, HashMap<String, Object>> xyInfo,int to){
int index =(int) map.get("index");
HashMap<String, Object> xYMap = xyInfo.get(index);
Integer x1 =(int) xYMap.get("x1");
Integer x2 = (int)xYMap.get("x2");
Integer y1 = (int)xYMap.get("y1");
// recognized text, keyed by the candidate cell's y1 coordinate
HashMap<Integer, String> textMap = new HashMap<>();
// y1 coordinates of the candidate cells
ArrayList<Integer> indexArrayList = new ArrayList<>();
for (Integer setMap:xyInfo.keySet()) {
HashMap<String, Object> hashMap = xyInfo.get(setMap);
Integer x11 =(int) hashMap.get("x1");
Integer x22 =(int) hashMap.get("x2");
boolean b = (x1 <= x11 && x22 >= x11) || (x1 <= x22 && x2 >= x22);
if (b){
textMap.put((int)hashMap.get("y1"),hashMap.get("Text")!=null?hashMap.get("Text").toString():null);
indexArrayList.add((int)hashMap.get("y1"));
}
}
Integer[] arr=indexArrayList.toArray(new Integer[0]);
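// bubble-sort the collected coordinates in ascending order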
for(int i1=0;i1<arr.length-1;i1++){
for(int j=0;j<arr.length-1-i1;j++){
if(arr[j]>arr[j+1]){
int temp=arr[j+1];
arr[j+1]=arr[j];
arr[j]=temp;
}
}
}
// text of the cell "to" positions below (or above) the matched cell
int i = Arrays.binarySearch(arr, y1) + to;
return textMap.get(arr[i]);
}
/**
* Returns the text of the cell next to the matched cell in the horizontal direction.
* @author
* @date 2023/2/1 16:25
* @param map the matched box (text, coordinates and index)
* @param xyInfo coordinates of all recognized boxes, keyed by index
* @param to offset in the sorted order: +1 the next cell, -1 the previous cell, 0 the matched cell itself
* @return String
*/
public static String getRight(HashMap<String, Object> map,HashMap<Integer, HashMap<String, Object>> xyInfo,int to){
int index =(int) map.get("index");
HashMap<String, Object> xYMap = xyInfo.get(index);
Integer y1 =(int) xYMap.get("y1");
Integer y4 = (int)xYMap.get("y4");
Integer x1 = (int)xYMap.get("x1");
HashMap<Integer, String> textMap = new HashMap<>();
ArrayList<Integer> indexArrayList = new ArrayList<>();
for (Integer setMap:xyInfo.keySet()) {
HashMap<String, Object> hashMap = xyInfo.get(setMap);
Integer y11 =(int) hashMap.get("y1");
Integer y44 =(int) hashMap.get("y4");
boolean b = (y1 <= y11 && y4 >= y11) || (y1 <= y44 && y4 >= y44);
if (b){
textMap.put((int)hashMap.get("x1"),hashMap.get("Text")!=null?hashMap.get("Text").toString():null);
indexArrayList.add((int)hashMap.get("x1"));
}
}
Integer[] arr=indexArrayList.toArray(new Integer[0]);
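// bubble-sort the collected coordinates in ascending order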
for(int i1=0;i1<arr.length-1;i1++){
for(int j=0;j<arr.length-1-i1;j++){
if(arr[j]>arr[j+1]){
int temp=arr[j+1];
arr[j+1]=arr[j];
arr[j]=temp;
}
}
}
// text of the cell "to" positions to the right (or left) of the matched cell
int i = Arrays.binarySearch(arr, x1) + to;
return textMap.get(arr[i]);
}
/**
* Extracts the integer corner coordinates from a box NDArray.
* @author
* @date 2023/2/1 10:10
* @param box the rotated box as an NDArray of four (x, y) points
* @return int[] x coordinates in positions 0-4 and y coordinates in positions 5-9 (the first corner is repeated to close the polygon)
*/
public static int[] getBoxXY( NDArray box) {
float[] points = box.toFloatArray();
int[] xYPoints = new int[10];
for (int i = 0; i < 4; i++) {
xYPoints[i] = (int) points[2 * i];
xYPoints[i+5] = (int) points[2 * i + 1];
}
xYPoints[4] = xYPoints[0];
xYPoints[4+5] = xYPoints[0+5];
return xYPoints;
}
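/**
* Runs the full OCR pipeline on an input stream: loads the detection and recognition models,
* predicts the rotated text boxes, draws the boxes and their text onto a copy of the image,
* saves it to build/output/ocr_result.png and returns the detections.
*/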
public static List<RotatedBox> ocrAI(InputStream picture) throws IOException, ModelException, TranslateException {
StringBuffer ocrStr = new StringBuffer("本次识别的内容:");
Image image = OpenCVImageFactory.getInstance().fromInputStream(picture);
OcrV3Detection detection = new OcrV3Detection();
OcrV3Recognition recognition = new OcrV3Recognition();
ZooModel detectionModel = ModelZoo.loadModel(detection.detectCriteria());
Predictor<Image, NDList> detector = detectionModel.newPredictor();
ZooModel recognitionModel = ModelZoo.loadModel(recognition.recognizeCriteria());
Predictor<Image, String> recognizer = recognitionModel.newPredictor() ;
long timeInferStart = System.currentTimeMillis();
List<RotatedBox> detections = recognition.predict(image, detector, recognizer);
// for (int i = 0; i < 1000; i++) {
// detections = recognition.predict(image, detector, recognizer);
// System.out.println("time: " + i);
// }
long timeInferEnd = System.currentTimeMillis();
System.out.println("time: " + (timeInferEnd - timeInferStart));
for (RotatedBox result : detections) {
System.out.println(result.getText());
ocrStr.append(result.getText());
}
// Use the keywords to locate the matching box, then match neighbouring boxes by their position and edges, and read the recognized content from those boxes.
BufferedImage bufferedImage = OpenCVUtils.mat2Image((org.opencv.core.Mat) image.getWrappedImage());
for (RotatedBox result : detections) {
NDArray box = result.getBox();
ImageUtils.drawImageRectWithText(bufferedImage, result.getBox(), result.getText());
}
image = ImageFactory.getInstance().fromImage(OpenCVUtils.image2Mat(bufferedImage));
ImageUtils.saveImage(image, "ocr_result.png", "build/output");
logger.info("{}", detections);
logger.info(ocrStr.toString());
return detections;
}
public static void main(String[] args) throws IOException, ModelException, TranslateException {
// Path imageFile = Paths.get("src/test/resources/7.jpg");
String relativelyPath=System.getProperty("user.dir");
// Path imageFile = Paths.get("C:\\Users\\admin\\Desktop\\图像\\AISDK\\AIAS\\1_image_sdks\\text_recognition\\ocr_sdk\\src\\test\\resources\\7.jpg");
// Path imageFile = Paths.get("src/test/resources/7.jpg");
StringBuffer ocrStr = new StringBuffer("本次识别的内容:");
Path imageFile = Paths.get("C:/Users/admin/Desktop/AAAA.png");
Image image = OpenCVImageFactory.getInstance().fromFile(imageFile);
OcrV3Detection detection = new OcrV3Detection();
OcrV3Recognition recognition = new OcrV3Recognition();
try (ZooModel detectionModel = ModelZoo.loadModel(detection.detectCriteria());
Predictor<Image, NDList> detector = detectionModel.newPredictor();
ZooModel recognitionModel = ModelZoo.loadModel(recognition.recognizeCriteria());
Predictor<Image, String> recognizer = recognitionModel.newPredictor()) {
long timeInferStart = System.currentTimeMillis();
List<RotatedBox> detections = recognition.predict(image, detector, recognizer);
// for (int i = 0; i < 1000; i++) {
// detections = recognition.predict(image, detector, recognizer);
// System.out.println("time: " + i);
// }
long timeInferEnd = System.currentTimeMillis();
System.out.println("time: " + (timeInferEnd - timeInferStart));
for (RotatedBox result : detections) {
System.out.println(result.getText());
ocrStr.append(result.getText());
}
BufferedImage bufferedImage = OpenCVUtils.mat2Image((org.opencv.core.Mat) image.getWrappedImage());
for (RotatedBox result : detections) {
ImageUtils.drawImageRectWithText(bufferedImage, result.getBox(), result.getText());
}
image = ImageFactory.getInstance().fromImage(OpenCVUtils.image2Mat(bufferedImage));
ImageUtils.saveImage(image, "ocr_result.png", "build/output");
logger.info("{}", detections);
logger.info(ocrStr.toString());
}
}
}

@@ -1,7 +1,6 @@
package jnpf.ocr_sdk.controller;
import ai.djl.ModelException;
import ai.djl.translate.TranslateException;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import jnpf.base.ActionResult;
@@ -28,14 +27,14 @@ import java.io.IOException;
public class BaiduOcrController {
@ApiOperation("发票识别")
@PostMapping("/uPicture")
public ActionResult UploadPicture(MultipartFile file ) throws IOException, ModelException, TranslateException {
public ActionResult UploadPicture(MultipartFile file ) throws IOException {
String s = VatInvoice.vatInvoice(file);
return ActionResult.success(JsonUtil.stringToMap(s));
}
@ApiOperation("榜单识别")
@PostMapping("/weightNote")
public ActionResult weightNote(MultipartFile file ) throws IOException, ModelException, TranslateException {
public ActionResult weightNote(MultipartFile file ) throws IOException {
String s = VatInvoice.weightNote(file);
return ActionResult.success(JsonUtil.stringToMap(s));
}

@@ -1,81 +0,0 @@
package jnpf.ocr_sdk.controller;
import ai.djl.ModelException;
import ai.djl.translate.TranslateException;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import jnpf.base.ActionResult;
import jnpf.ocr_sdk.OcrV3RecognitionExample;
import jnpf.ocr_sdk.utils.common.RotatedBox;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
/**
* @Author: WangChuang
* @Date: 10/2/2023 9:06
* @Description AI image (OCR) recognition controller
* @Version 1.0
*/
@Slf4j
@RestController
@Api(tags = "AI识别图像API" , value = "AI识别图像API")
@RequestMapping("/api/OcrAPI/OcrAPI")
public class OcrController {
// @Resource
// private OcrV3RecognitionExample ocrV3RecognitionExample;
@ApiOperation("图片上传")
@PostMapping("/uPicture")
public ActionResult UploadPicture(MultipartFile file ) throws IOException, ModelException, TranslateException {
String fileName = file.getOriginalFilename();
System.out.println(fileName);
// build the keyword list and the direction map used by the OCR utility
ArrayList<String> columnNames = new ArrayList<>();
HashMap<String, Integer> columnMap = new HashMap<>();
// invoice code
columnNames.add("No");
// 1: take the next cell to the right
// 2: take the previous cell to the left
// -1: take the next cell below
// -2: take the previous cell above
// 0: take the matched cell itself
columnMap.put("No",0);
// invoice number
columnNames.add("机器编号");
columnMap.put("机器编号",-1);
// invoice quantity
columnNames.add("量");
columnMap.put("量",-1);
// invoice amount
columnNames.add("小写");
columnMap.put("小写",-2);
// tax rate
columnNames.add("税率");
columnMap.put("税率",-1);
// tax amount (derived again from the tax rate)
// amount excluding tax (amount minus tax amount)
// invoice date
columnNames.add("开票日期:");
columnMap.put("开票日期:",0);
// material name
columnNames.add("货物或应税劳务");
columnMap.put("货物或应税劳务",-1);
List<RotatedBox> rotatedBoxes = OcrV3RecognitionExample.ocrAI(file.getInputStream());
ArrayList<String> example = OcrV3RecognitionExample.getExample(rotatedBoxes, columnNames,columnMap);
return ActionResult.success(example);
}
}

@@ -1,141 +0,0 @@
package jnpf.ocr_sdk.utils.cls;
import ai.djl.inference.Predictor;
import ai.djl.modality.Classifications;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.modality.cv.util.NDImageUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.repository.zoo.Criteria;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.TranslateException;
import jnpf.ocr_sdk.utils.detection.PpWordDetectionTranslator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
public final class OcrDirectionDetection {
private static final Logger logger = LoggerFactory.getLogger(OcrDirectionDetection.class);
public OcrDirectionDetection() {}
public DetectedObjects predict(
Image image,
Predictor<Image, DetectedObjects> detector,
Predictor<Image, Classifications> rotateClassifier)
throws TranslateException {
DetectedObjects detections = detector.predict(image);
List<DetectedObjects.DetectedObject> boxes = detections.items();
List<String> names = new ArrayList<>();
List<Double> prob = new ArrayList<>();
List<BoundingBox> rect = new ArrayList<>();
for (int i = 0; i < boxes.size(); i++) {
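// Tall, narrow crops (height/width > 1.5) are rotated 90 degrees before classification;
// the classifier result is then mapped to a rotation of 0, 90, 180 or 270 degrees.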
Image subImg = getSubImage(image, boxes.get(i).getBoundingBox());
Classifications.Classification result = null;
if (subImg.getHeight() * 1.0 / subImg.getWidth() > 1.5) {
subImg = rotateImg(subImg);
result = rotateClassifier.predict(subImg).best();
prob.add(result.getProbability());
if (result.getClassName().equalsIgnoreCase("Rotate")) {
names.add("90");
} else {
names.add("270");
}
} else {
result = rotateClassifier.predict(subImg).best();
prob.add(result.getProbability());
if (result.getClassName().equalsIgnoreCase("No Rotate")) {
names.add("0");
} else {
names.add("180");
}
}
rect.add(boxes.get(i).getBoundingBox());
}
DetectedObjects detectedObjects = new DetectedObjects(names, prob, rect);
return detectedObjects;
}
public Criteria<Image, DetectedObjects> detectCriteria() {
Criteria<Image, DetectedObjects> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, DetectedObjects.class)
.optModelUrls(
"https://aias-home.oss-cn-beijing.aliyuncs.com/models/ocr_models/ch_PP-OCRv2_det_infer.zip")
// .optModelUrls(
// "/Users/calvin/Documents/build/paddle_models/ppocr/ch_PP-OCRv2_det_infer")
// .optDevice(Device.cpu())
.optTranslator(new PpWordDetectionTranslator(new ConcurrentHashMap<String, String>()))
.optProgress(new ProgressBar())
.build();
return criteria;
}
public Criteria<Image, Classifications> clsCriteria() {
Criteria<Image, Classifications> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, Classifications.class)
.optModelUrls(
"https://aias-home.oss-cn-beijing.aliyuncs.com/models/ocr_models/ch_ppocr_mobile_v2.0_cls_infer.zip")
// .optModelUrls(
// "/Users/calvin/Documents/build/paddle_models/ppocr/ch_ppocr_mobile_v2.0_cls_infer")
.optTranslator(new PpWordRotateTranslator())
.optProgress(new ProgressBar())
.build();
return criteria;
}
private Image getSubImage(Image img, BoundingBox box) {
Rectangle rect = box.getBounds();
double[] extended = extendRect(rect.getX(), rect.getY(), rect.getWidth(), rect.getHeight());
int width = img.getWidth();
int height = img.getHeight();
int[] recovered = {
(int) (extended[0] * width),
(int) (extended[1] * height),
(int) (extended[2] * width),
(int) (extended[3] * height)
};
return img.getSubImage(recovered[0], recovered[1], recovered[2], recovered[3]);
}
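/** Expands the normalized rectangle around its centre to add margin, then clips it to the [0, 1] range. */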
private double[] extendRect(double xmin, double ymin, double width, double height) {
double centerx = xmin + width / 2;
double centery = ymin + height / 2;
if (width > height) {
width += height * 2.0;
height *= 3.0;
} else {
height += width * 2.0;
width *= 3.0;
}
double newX = centerx - width / 2 < 0 ? 0 : centerx - width / 2;
double newY = centery - height / 2 < 0 ? 0 : centery - height / 2;
double newWidth = newX + width > 1 ? 1 - newX : width;
double newHeight = newY + height > 1 ? 1 - newY : height;
return new double[] {newX, newY, newWidth, newHeight};
}
private Image rotateImg(Image image) {
try (NDManager manager = NDManager.newBaseManager()) {
NDArray rotated = NDImageUtils.rotate90(image.toNDArray(manager), 1);
return ImageFactory.getInstance().fromNDArray(rotated);
}
}
}

@@ -1,69 +0,0 @@
package jnpf.ocr_sdk.utils.cls;
import ai.djl.modality.Classifications;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.util.NDImageUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.index.NDIndex;
import ai.djl.ndarray.types.Shape;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import java.util.Arrays;
import java.util.List;
public class PpWordRotateTranslator implements Translator<Image, Classifications> {
List<String> classes = Arrays.asList("No Rotate", "Rotate");
public PpWordRotateTranslator() {}
public Classifications processOutput(TranslatorContext ctx, NDList list) {
NDArray prob = list.singletonOrThrow();
return new Classifications(this.classes, prob);
}
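// Resize the crop to the 192x48 input expected by the direction classifier and scale pixels to [-1, 1].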
public NDList processInput(TranslatorContext ctx, Image input) throws Exception {
NDArray img = input.toNDArray(ctx.getNDManager());
img = NDImageUtils.resize(img, 192, 48);
img = NDImageUtils.toTensor(img).sub(0.5F).div(0.5F);
img = img.expandDims(0);
return new NDList(new NDArray[]{img});
}
public NDList processInputBak(TranslatorContext ctx, Image input) throws Exception {
NDArray img = input.toNDArray(ctx.getNDManager());
int imgC = 3;
int imgH = 48;
int imgW = 192;
NDArray array = ctx.getNDManager().zeros(new Shape(imgC, imgH, imgW));
int h = input.getHeight();
int w = input.getWidth();
int resized_w = 0;
float ratio = (float) w / (float) h;
if (Math.ceil(imgH * ratio) > imgW) {
resized_w = imgW;
} else {
resized_w = (int) (Math.ceil(imgH * ratio));
}
img = NDImageUtils.resize(img, resized_w, imgH);
img = NDImageUtils.toTensor(img).sub(0.5F).div(0.5F);
// img = img.transpose(2, 0, 1);
array.set(new NDIndex(":,:,0:" + resized_w), img);
array = array.expandDims(0);
return new NDList(new NDArray[] {array});
}
public Batchifier getBatchifier() {
return null;
}
}

@@ -1,99 +0,0 @@
package jnpf.ocr_sdk.utils.common;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.DetectedObjects;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
public class DJLImageUtils {
public static Image bufferedImage2DJLImage(BufferedImage img) {
return ImageFactory.getInstance().fromImage(img);
}
public static void saveImage(BufferedImage img, String name, String path) {
Image djlImg = ImageFactory.getInstance().fromImage(img); // supports multiple image formats automatically
Path outputDir = Paths.get(path);
Path imagePath = outputDir.resolve(name);
// OpenJDK cannot save the alpha channel of jpg images, so write png
try {
djlImg.save(Files.newOutputStream(imagePath), "png");
} catch (IOException e) {
e.printStackTrace();
}
}
public static void saveDJLImage(Image img, String name, String path) {
Path outputDir = Paths.get(path);
Path imagePath = outputDir.resolve(name);
// OpenJDK cannot save the alpha channel of jpg images, so write png
try {
img.save(Files.newOutputStream(imagePath), "png");
} catch (IOException e) {
e.printStackTrace();
}
}
public static void saveBoundingBoxImage(
Image img, DetectedObjects detection, String name, String path) throws IOException {
// Make an image copy with alpha channel because the original image was jpg
img.drawBoundingBoxes(detection);
Path outputDir = Paths.get(path);
Files.createDirectories(outputDir);
Path imagePath = outputDir.resolve(name);
// OpenJDK can't save jpg with alpha channel
img.save(Files.newOutputStream(imagePath), "png");
}
public static void drawImageRect(BufferedImage image, int x, int y, int width, int height) {
// obtain a Graphics2D drawing context for the image
Graphics2D g = (Graphics2D) image.getGraphics();
try {
g.setColor(new Color(246, 96, 0));
// stroke attributes: width in pixels, unadorned line caps, mitered joins
BasicStroke bStroke = new BasicStroke(4, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER);
g.setStroke(bStroke);
g.drawRect(x, y, width, height);
} finally {
g.dispose();
}
}
public static void drawImageRect(
BufferedImage image, int x, int y, int width, int height, Color c) {
// obtain a Graphics2D drawing context for the image
Graphics2D g = (Graphics2D) image.getGraphics();
try {
g.setColor(c);
// stroke attributes: width in pixels, unadorned line caps, mitered joins
BasicStroke bStroke = new BasicStroke(4, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER);
g.setStroke(bStroke);
g.drawRect(x, y, width, height);
} finally {
g.dispose();
}
}
public static void drawImageText(BufferedImage image, String text) {
Graphics graphics = image.getGraphics();
int fontSize = 100;
Font font = new Font("楷体", Font.PLAIN, fontSize);
try {
graphics.setFont(font);
graphics.setColor(new Color(246, 96, 0));
int strWidth = graphics.getFontMetrics().stringWidth(text);
graphics.drawString(text, fontSize - (strWidth / 2), fontSize + 30);
} finally {
graphics.dispose();
}
}
}

@@ -1,48 +0,0 @@
package jnpf.ocr_sdk.utils.common;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.output.BoundingBox;
public class ImageInfo {
private String name;
private Double prob;
private Image image;
private BoundingBox box;
public ImageInfo(Image image, BoundingBox box) {
this.image = image;
this.box = box;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Double getProb() {
return prob;
}
public void setProb(Double prob) {
this.prob = prob;
}
public Image getImage() {
return image;
}
public void setImage(Image image) {
this.image = image;
}
public BoundingBox getBox() {
return box;
}
public void setBox(BoundingBox box) {
this.box = box;
}
}

@@ -1,262 +0,0 @@
package jnpf.ocr_sdk.utils.common;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.ndarray.NDArray;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
public class ImageUtils {
/**
* Convert a BufferedImage to a DJL Image.
*
* @author Calvin
*/
public static Image convert(BufferedImage img) {
return ImageFactory.getInstance().fromImage(img);
}
/**
* Save a BufferedImage to the given path.
*
* @author Calvin
*/
public static void saveImage(BufferedImage img, String name, String path) {
Image djlImg = ImageFactory.getInstance().fromImage(img); // supports multiple image formats automatically
Path outputDir = Paths.get(path);
Path imagePath = outputDir.resolve(name);
// OpenJDK cannot save the alpha channel of jpg images, so write png
try {
djlImg.save(Files.newOutputStream(imagePath), "png");
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* Save a DJL Image to the given path.
*
* @author Calvin
*/
public static void saveImage(Image img, String name, String path) {
Path outputDir = Paths.get(path);
Path imagePath = outputDir.resolve(name);
// OpenJDK cannot save the alpha channel of jpg images, so write png
try {
img.save(Files.newOutputStream(imagePath), "png");
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* Draw the detected bounding boxes on the image and save it.
*
* @author Calvin
*/
public static void saveBoundingBoxImage(
Image img, DetectedObjects detection, String name, String path) throws IOException {
// Make image copy with alpha channel because original image was jpg
img.drawBoundingBoxes(detection);
Path outputDir = Paths.get(path);
Files.createDirectories(outputDir);
Path imagePath = outputDir.resolve(name);
// OpenJDK can't save jpg with alpha channel
img.save(Files.newOutputStream(imagePath), "png");
}
/**
* Draw landmark points inside the given bounding box.
* @author Calvin
*/
public static void drawLandmark(Image img, BoundingBox box, float[] array) {
for (int i = 0; i < array.length / 2; i++) {
int x = getX(img, box, array[2 * i]);
int y = getY(img, box, array[2 * i + 1]);
Color c = new Color(0, 255, 0);
drawImageRect((BufferedImage) img.getWrappedImage(), x, y, 1, 1, c);
}
}
/**
* Draw the rotated text box (polygon) defined by an NDArray.
*
* @author Calvin
*/
public static void drawImageRect(BufferedImage image, NDArray box) {
float[] points = box.toFloatArray();
int[] xPoints = new int[5];
int[] yPoints = new int[5];
for (int i = 0; i < 4; i++) {
xPoints[i] = (int) points[2 * i];
yPoints[i] = (int) points[2 * i + 1];
}
xPoints[4] = xPoints[0];
yPoints[4] = yPoints[0];
// obtain a Graphics2D drawing context for the image
Graphics2D g = (Graphics2D) image.getGraphics();
try {
g.setColor(new Color(0, 255, 0));
// stroke attributes: width in pixels, unadorned line caps, mitered joins
BasicStroke bStroke = new BasicStroke(4, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER);
g.setStroke(bStroke);
g.drawPolyline(xPoints, yPoints, 5); // xPoints, yPoints, nPoints
} finally {
g.dispose();
}
}
/**
* Draw the rotated text box (polygon) together with its recognized text.
*
* @author Calvin
*/
public static void drawImageRectWithText(BufferedImage image, NDArray box, String text) {
float[] points = box.toFloatArray();
int[] xPoints = new int[5];
int[] yPoints = new int[5];
for (int i = 0; i < 4; i++) {
xPoints[i] = (int) points[2 * i];
yPoints[i] = (int) points[2 * i + 1];
}
xPoints[4] = xPoints[0];
yPoints[4] = yPoints[0];
// obtain a Graphics2D drawing context for the image
Graphics2D g = (Graphics2D) image.getGraphics();
try {
int fontSize = 32;
Font font = new Font("楷体", Font.PLAIN, fontSize);
g.setFont(font);
g.setColor(new Color(0, 0, 255));
// stroke attributes: width in pixels, unadorned line caps, mitered joins
BasicStroke bStroke = new BasicStroke(2, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER);
g.setStroke(bStroke);
g.drawPolyline(xPoints, yPoints, 5); // xPoints, yPoints, nPoints
g.drawString(text, xPoints[0], yPoints[0]);
} finally {
g.dispose();
}
}
/**
* Draw an axis-aligned rectangle on the image.
* @author Calvin
*/
public static void drawImageRect(BufferedImage image, int x, int y, int width, int height) {
// obtain a Graphics2D drawing context for the image
Graphics2D g = (Graphics2D) image.getGraphics();
try {
g.setColor(new Color(0, 255, 0));
// stroke attributes: width in pixels, unadorned line caps, mitered joins
BasicStroke bStroke = new BasicStroke(2, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER);
g.setStroke(bStroke);
g.drawRect(x, y, width, height);
} finally {
g.dispose();
}
}
/**
* Draw an axis-aligned rectangle on the image with the given color.
* @author Calvin
*/
public static void drawImageRect(
BufferedImage image, int x, int y, int width, int height, Color c) {
// obtain a Graphics2D drawing context for the image
Graphics2D g = (Graphics2D) image.getGraphics();
try {
g.setColor(c);
// stroke attributes: width in pixels, unadorned line caps, mitered joins
BasicStroke bStroke = new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER);
g.setStroke(bStroke);
g.drawRect(x, y, width, height);
} finally {
g.dispose();
}
}
/**
* Draw text on the image at the given position.
* @author Calvin
*/
public static void drawImageText(BufferedImage image, String text, int x, int y) {
Graphics graphics = image.getGraphics();
int fontSize = 32;
Font font = new Font("楷体", Font.PLAIN, fontSize);
try {
graphics.setFont(font);
graphics.setColor(new Color(0, 0, 255));
int strWidth = graphics.getFontMetrics().stringWidth(text);
graphics.drawString(text, x, y);
} finally {
graphics.dispose();
}
}
/**
* Crop the sub-image inside the bounding box, expanded outward by factor (factor = 1 expands by 100%, factor = 0.2 by 20%).
*
* @author Calvin
*/
public static Image getSubImage(Image img, BoundingBox box, float factor) {
Rectangle rect = box.getBounds();
// top-left corner
int x1 = (int) (rect.getX() * img.getWidth());
int y1 = (int) (rect.getY() * img.getHeight());
// width and height
int w = (int) (rect.getWidth() * img.getWidth());
int h = (int) (rect.getHeight() * img.getHeight());
// bottom-right corner
int x2 = x1 + w;
int y2 = y1 + h;
// expand outward (up to 100%) to avoid black borders after face alignment
int new_x1 = Math.max((int) (x1 + x1 * factor / 2 - x2 * factor / 2), 0);
int new_x2 = Math.min((int) (x2 + x2 * factor / 2 - x1 * factor / 2), img.getWidth() - 1);
int new_y1 = Math.max((int) (y1 + y1 * factor / 2 - y2 * factor / 2), 0);
int new_y2 = Math.min((int) (y2 + y2 * factor / 2 - y1 * factor / 2), img.getHeight() - 1);
int new_w = new_x2 - new_x1;
int new_h = new_y2 - new_y1;
return img.getSubImage(new_x1, new_y1, new_w, new_h);
}
private static int getX(Image img, BoundingBox box, float x) {
Rectangle rect = box.getBounds();
// top-left x coordinate
int x1 = (int) (rect.getX() * img.getWidth());
// width
int w = (int) (rect.getWidth() * img.getWidth());
return (int) (x * w + x1);
}
private static int getY(Image img, BoundingBox box, float y) {
Rectangle rect = box.getBounds();
// top-left y coordinate
int y1 = (int) (rect.getY() * img.getHeight());
// height
int h = (int) (rect.getHeight() * img.getHeight());
return (int) (y * h + y1);
}
}

@@ -1,29 +0,0 @@
package jnpf.ocr_sdk.utils.common;
import ai.djl.ndarray.NDArray;
public class RotatedBox {
private NDArray box;
private String text;
public RotatedBox(NDArray box, String text) {
this.box = box;
this.text = text;
}
public NDArray getBox() {
return box;
}
public void setBox(NDArray box) {
this.box = box;
}
public String getText() {
return text;
}
public void setText(String text) {
this.text = text;
}
}

@@ -1,574 +0,0 @@
package jnpf.ocr_sdk.utils.detection;
import ai.djl.modality.cv.BufferedImageFactory;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.util.NDImageUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.index.NDIndex;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import org.bytedeco.javacpp.indexer.FloatRawIndexer;
import org.bytedeco.javacpp.indexer.IntRawIndexer;
import org.bytedeco.javacpp.indexer.UByteRawIndexer;
import org.bytedeco.opencv.global.opencv_imgproc;
import org.bytedeco.opencv.opencv_core.*;
import org.opencv.core.CvType;
import java.util.Map;
import static org.bytedeco.opencv.global.opencv_imgproc.*;
public class OCRDetectionTranslator implements Translator<Image, NDList> {
private Image image;
private final int max_side_len;
private final int max_candidates;
private final int min_size;
private final float box_thresh;
private final float unclip_ratio;
private float ratio_h;
private float ratio_w;
private int img_height;
private int img_width;
public OCRDetectionTranslator(Map<String, ?> arguments) {
max_side_len =
arguments.containsKey("max_side_len")
? Integer.parseInt(arguments.get("max_side_len").toString())
: 960;
max_candidates =
arguments.containsKey("max_candidates")
? Integer.parseInt(arguments.get("max_candidates").toString())
: 1000;
min_size =
arguments.containsKey("min_size")
? Integer.parseInt(arguments.get("min_size").toString())
: 3;
box_thresh =
arguments.containsKey("box_thresh")
? Float.parseFloat(arguments.get("box_thresh").toString())
: 0.5f;
unclip_ratio =
arguments.containsKey("unclip_ratio")
? Float.parseFloat(arguments.get("unclip_ratio").toString())
: 1.6f;
}
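/**
* Post-processing of the DB detector output: threshold the probability map at 0.3, dilate it so
* neighbouring text pixels merge, extract candidate boxes from the bitmap and scale them back
* to the original image size using ratio_w / ratio_h.
*/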
@Override
public NDList processOutput(TranslatorContext ctx, NDList list) {
NDManager manager = ctx.getNDManager();
NDArray pred = list.singletonOrThrow();
pred = pred.squeeze();
NDArray segmentation = pred.toType(DataType.UINT8, true).gt(0.3); // thresh=0.3 .mul(255f)
segmentation = segmentation.toType(DataType.UINT8, true);
//convert from NDArray to Mat
byte[] byteArray = segmentation.toByteArray();
Shape shape = segmentation.getShape();
int rows = (int) shape.get(0);
int cols = (int) shape.get(1);
Mat srcMat = new Mat(rows, cols, CvType.CV_8U);
UByteRawIndexer ldIdx = srcMat.createIndexer();
for (int row = 0; row < rows; row++) {
for (int col = 0; col < cols; col++) {
ldIdx.put(row, col, byteArray[row * cols + col]);
}
}
ldIdx.release();
Mat mask = new Mat();
// the smaller the kernel size, the smaller the dilation step and the closer the result stays to the original image
Mat structImage =
opencv_imgproc.getStructuringElement(opencv_imgproc.MORPH_RECT, new Size(2, 2));
/**
* Dilate the thresholded mask so that neighbouring text pixels are merged,
* which makes the subsequent contour detection more stable.
*/
opencv_imgproc.dilate(srcMat, mask, structImage);
ldIdx = mask.createIndexer();
for (int row = 0; row < rows; row++) {
for (int col = 0; col < cols; col++) {
ldIdx.put(row, col, ldIdx.get(row, col) * 255);
}
}
ldIdx.release();
NDArray boxes = boxes_from_bitmap(manager, pred, mask, box_thresh);
//boxes[:, :, 0] = boxes[:, :, 0] / ratio_w
NDArray boxes1 = boxes.get(":, :, 0").div(ratio_w);
boxes.set(new NDIndex(":, :, 0"), boxes1);
//boxes[:, :, 1] = boxes[:, :, 1] / ratio_h
NDArray boxes2 = boxes.get(":, :, 1").div(ratio_h);
boxes.set(new NDIndex(":, :, 1"), boxes2);
NDList dt_boxes = this.filter_tag_det_res(boxes);
dt_boxes.detach();
// release Mat
srcMat.release();
mask.release();
structImage.release();
return dt_boxes;
}
private NDList filter_tag_det_res(NDArray dt_boxes) {
NDList boxesList = new NDList();
int num = (int) dt_boxes.getShape().get(0);
for (int i = 0; i < num; i++) {
NDArray box = dt_boxes.get(i);
box = order_points_clockwise(box);
box = clip_det_res(box);
float[] box0 = box.get(0).toFloatArray();
float[] box1 = box.get(1).toFloatArray();
float[] box3 = box.get(3).toFloatArray();
int rect_width = (int) Math.sqrt(Math.pow(box1[0] - box0[0], 2) + Math.pow(box1[1] - box0[1], 2));
int rect_height = (int) Math.sqrt(Math.pow(box3[0] - box0[0], 2) + Math.pow(box3[1] - box0[1], 2));
if (rect_width <= 3 || rect_height <= 3)
continue;
boxesList.add(box);
}
return boxesList;
}
private NDArray clip_det_res(NDArray points) {
for (int i = 0; i < points.getShape().get(0); i++) {
int value = Math.max((int) points.get(i, 0).toFloatArray()[0], 0);
value = Math.min(value, img_width - 1);
points.set(new NDIndex(i + ",0"), value);
value = Math.max((int) points.get(i, 1).toFloatArray()[0], 0);
value = Math.min(value, img_height - 1);
points.set(new NDIndex(i + ",1"), value);
}
return points;
}
/**
* sort the points based on their x-coordinates
*
*
* @param pts
* @return
*/
private NDArray order_points_clockwise(NDArray pts) {
NDList list = new NDList();
long[] indexes = pts.get(":, 0").argSort().toLongArray();
// grab the left-most and right-most points from the sorted
// x-roodinate points
Shape s1 = pts.getShape();
NDArray leftMost1 = pts.get(indexes[0] + ",:");
NDArray leftMost2 = pts.get(indexes[1] + ",:");
NDArray leftMost = leftMost1.concat(leftMost2).reshape(2, 2);
NDArray rightMost1 = pts.get(indexes[2] + ",:");
NDArray rightMost2 = pts.get(indexes[3] + ",:");
NDArray rightMost = rightMost1.concat(rightMost2).reshape(2, 2);
// now, sort the left-most coordinates according to their
// y-coordinates so we can grab the top-left and bottom-left
// points, respectively
indexes = leftMost.get(":, 1").argSort().toLongArray();
NDArray lt = leftMost.get(indexes[0] + ",:");
NDArray lb = leftMost.get(indexes[1] + ",:");
indexes = rightMost.get(":, 1").argSort().toLongArray();
NDArray rt = rightMost.get(indexes[0] + ",:");
NDArray rb = rightMost.get(indexes[1] + ",:");
list.add(lt);
list.add(rt);
list.add(rb);
list.add(lb);
NDArray rect = NDArrays.concat(list).reshape(4, 2);
return rect;
}
/**
* Get boxes from the binarized image predicted by DB
*
* @param manager
* @param pred the binarized image predicted by DB.
* @param mask new 'pred' after threshold filtering.
*/
private NDArray boxes_from_bitmap(NDManager manager, NDArray pred, Mat mask, float box_thresh) {
int dest_height = (int) pred.getShape().get(0);
int dest_width = (int) pred.getShape().get(1);
int height = mask.rows();
int width = mask.cols();
MatVector contours = new MatVector();
Mat hierarchy = new Mat();
// find contours in the mask
findContours(
mask,
contours,
hierarchy,
opencv_imgproc.RETR_LIST,
opencv_imgproc.CHAIN_APPROX_SIMPLE,
new Point(0, 0));
int num_contours = Math.min((int) contours.size(), max_candidates);
NDList boxList = new NDList();
// NDArray boxes = manager.zeros(new Shape(num_contours, 4, 2), DataType.FLOAT32);
float[] scores = new float[num_contours];
int count = 0;
for (int index = 0; index < num_contours; index++) {
Mat contour = contours.get(index);
float[][] pointsArr = new float[4][2];
int sside = get_mini_boxes(contour, pointsArr);
if (sside < this.min_size)
continue;
NDArray points = manager.create(pointsArr);
float score = box_score_fast(manager, pred, points);
if (score < this.box_thresh)
continue;
NDArray box = unclip(manager, points); // TODO get_mini_boxes(box)
// box[:, 0] = np.clip(np.round(box[:, 0] / width * dest_width), 0, dest_width)
NDArray boxes1 = box.get(":,0").div(width).mul(dest_width).round().clip(0, dest_width);
box.set(new NDIndex(":, 0"), boxes1);
// box[:, 1] = np.clip(np.round(box[:, 1] / height * dest_height), 0, dest_height)
NDArray boxes2 = box.get(":,1").div(height).mul(dest_height).round().clip(0, dest_height);
box.set(new NDIndex(":, 1"), boxes2);
if (score > box_thresh) {
boxList.add(box);
// boxes.set(new NDIndex(count + ",:,:"), box);
scores[index] = score;
count++;
}
// release memory
contour.release();
}
// if (count < num_contours) {
// NDArray newBoxes = manager.zeros(new Shape(count, 4, 2), DataType.FLOAT32);
// newBoxes.set(new NDIndex("0,0,0"), boxes.get(":" + count + ",:,:"));
// boxes = newBoxes;
// }
NDArray boxes = NDArrays.stack(boxList);
// release
hierarchy.release();
contours.releaseReference();
return boxes;
}
/**
* Shrink or expand the box according to 'unclip_ratio'.
*
* @param points The predicted box.
* @return unclipped box
*/
private NDArray unclip(NDManager manager, NDArray points) {
points = order_points_clockwise(points);
float[] pointsArr = points.toFloatArray();
float[] lt = java.util.Arrays.copyOfRange(pointsArr, 0, 2);
float[] lb = java.util.Arrays.copyOfRange(pointsArr, 6, 8);
float[] rt = java.util.Arrays.copyOfRange(pointsArr, 2, 4);
float[] rb = java.util.Arrays.copyOfRange(pointsArr, 4, 6);
float width = distance(lt, rt);
float height = distance(lt, lb);
if (width > height) {
float k = (lt[1] - rt[1]) / (lt[0] - rt[0]); // y = k * x + b
float delta_dis = height;
float delta_x = (float) Math.sqrt((delta_dis * delta_dis) / (k * k + 1));
float delta_y = Math.abs(k * delta_x);
if (k > 0) {
pointsArr[0] = lt[0] - delta_x + delta_y;
pointsArr[1] = lt[1] - delta_y - delta_x;
pointsArr[2] = rt[0] + delta_x + delta_y;
pointsArr[3] = rt[1] + delta_y - delta_x;
pointsArr[4] = rb[0] + delta_x - delta_y;
pointsArr[5] = rb[1] + delta_y + delta_x;
pointsArr[6] = lb[0] - delta_x - delta_y;
pointsArr[7] = lb[1] - delta_y + delta_x;
} else {
pointsArr[0] = lt[0] - delta_x - delta_y;
pointsArr[1] = lt[1] + delta_y - delta_x;
pointsArr[2] = rt[0] + delta_x - delta_y;
pointsArr[3] = rt[1] - delta_y - delta_x;
pointsArr[4] = rb[0] + delta_x + delta_y;
pointsArr[5] = rb[1] - delta_y + delta_x;
pointsArr[6] = lb[0] - delta_x + delta_y;
pointsArr[7] = lb[1] + delta_y + delta_x;
}
} else {
float k = (lt[1] - rt[1]) / (lt[0] - rt[0]); // y = k * x + b
float delta_dis = width;
float delta_y = (float) Math.sqrt((delta_dis * delta_dis) / (k * k + 1));
float delta_x = Math.abs(k * delta_y);
if (k > 0) {
pointsArr[0] = lt[0] + delta_x - delta_y;
pointsArr[1] = lt[1] - delta_y - delta_x;
pointsArr[2] = rt[0] + delta_x + delta_y;
pointsArr[3] = rt[1] - delta_y + delta_x;
pointsArr[4] = rb[0] - delta_x + delta_y;
pointsArr[5] = rb[1] + delta_y + delta_x;
pointsArr[6] = lb[0] - delta_x - delta_y;
pointsArr[7] = lb[1] + delta_y - delta_x;
} else {
pointsArr[0] = lt[0] - delta_x - delta_y;
pointsArr[1] = lt[1] - delta_y + delta_x;
pointsArr[2] = rt[0] - delta_x + delta_y;
pointsArr[3] = rt[1] - delta_y - delta_x;
pointsArr[4] = rb[0] + delta_x + delta_y;
pointsArr[5] = rb[1] + delta_y - delta_x;
pointsArr[6] = lb[0] + delta_x - delta_y;
pointsArr[7] = lb[1] + delta_y + delta_x;
}
}
points = manager.create(pointsArr).reshape(4, 2);
return points;
}
private float distance(float[] point1, float[] point2) {
float disX = point1[0] - point2[0];
float disY = point1[1] - point2[1];
float dis = (float) Math.sqrt(disX * disX + disY * disY);
return dis;
}
/**
* Get boxes from the contour or box.
*
* @param contour The predicted contour.
* @param pointsArr The predicted box.
* @return smaller side of box
*/
private int get_mini_boxes(Mat contour, float[][] pointsArr) {
// https://blog.csdn.net/qq_37385726/article/details/82313558
// bounding_box[1] - rect returns the width and height of the rectangle
RotatedRect rect = minAreaRect(contour);
Mat points = new Mat();
boxPoints(rect, points);
FloatRawIndexer ldIdx = points.createIndexer();
float[][] fourPoints = new float[4][2];
for (int row = 0; row < 4; row++) {
fourPoints[row][0] = ldIdx.get(row, 0);
fourPoints[row][1] = ldIdx.get(row, 1);
}
ldIdx.release();
float[] tmpPoint = new float[2];
for (int i = 0; i < 4; i++) {
for (int j = i + 1; j < 4; j++) {
if (fourPoints[j][0] < fourPoints[i][0]) {
tmpPoint[0] = fourPoints[i][0];
tmpPoint[1] = fourPoints[i][1];
fourPoints[i][0] = fourPoints[j][0];
fourPoints[i][1] = fourPoints[j][1];
fourPoints[j][0] = tmpPoint[0];
fourPoints[j][1] = tmpPoint[1];
}
}
}
int index_1 = 0;
int index_2 = 1;
int index_3 = 2;
int index_4 = 3;
if (fourPoints[1][1] > fourPoints[0][1]) {
index_1 = 0;
index_4 = 1;
} else {
index_1 = 1;
index_4 = 0;
}
if (fourPoints[3][1] > fourPoints[2][1]) {
index_2 = 2;
index_3 = 3;
} else {
index_2 = 3;
index_3 = 2;
}
pointsArr[0] = fourPoints[index_1];
pointsArr[1] = fourPoints[index_2];
pointsArr[2] = fourPoints[index_3];
pointsArr[3] = fourPoints[index_4];
int height = rect.boundingRect().height();
int width = rect.boundingRect().width();
int sside = Math.min(height, width);
// release
points.release();
rect.releaseReference();
return sside;
}
/**
* Calculate the score of box.
*
* @param bitmap The binarized image predicted by DB.
* @param points The predicted box
* @return
*/
private float box_score_fast(NDManager manager, NDArray bitmap, NDArray points) {
NDArray box = points.get(":");
long h = bitmap.getShape().get(0);
long w = bitmap.getShape().get(1);
// xmin = np.clip(np.floor(box[:, 0].min()).astype(np.int), 0, w - 1)
int xmin = box.get(":, 0").min().floor().clip(0, w - 1).toType(DataType.INT32, true).toIntArray()[0];
int xmax = box.get(":, 0").max().ceil().clip(0, w - 1).toType(DataType.INT32, true).toIntArray()[0];
int ymin = box.get(":, 1").min().floor().clip(0, h - 1).toType(DataType.INT32, true).toIntArray()[0];
int ymax = box.get(":, 1").max().ceil().clip(0, h - 1).toType(DataType.INT32, true).toIntArray()[0];
NDArray mask = manager.zeros(new Shape(ymax - ymin + 1, xmax - xmin + 1), DataType.UINT8);
box.set(new NDIndex(":, 0"), box.get(":, 0").sub(xmin));
box.set(new NDIndex(":, 1"), box.get(":, 1").sub(ymin));
//mask - convert from NDArray to Mat
byte[] maskArray = mask.toByteArray();
int rows = (int) mask.getShape().get(0);
int cols = (int) mask.getShape().get(1);
Mat maskMat = new Mat(rows, cols, CvType.CV_8U);
UByteRawIndexer ldIdx = maskMat.createIndexer();
for (int row = 0; row < rows; row++) {
for (int col = 0; col < cols; col++) {
ldIdx.put(row, col, maskArray[row * cols + col]);
}
}
ldIdx.release();
//box - convert from NDArray to Mat
float[] boxArray = box.toFloatArray();
Mat boxMat = new Mat(4, 2, CvType.CV_32S);
IntRawIndexer intRawIndexer = boxMat.createIndexer();
for (int row = 0; row < 4; row++) {
intRawIndexer.put(row, 0, (int) boxArray[row * 2]);
intRawIndexer.put(row, 1, (int) boxArray[row * 2 + 1]);
}
intRawIndexer.release();
// boxMat.reshape(1, new int[]{1, 4, 2});
MatVector matVector = new MatVector();
matVector.put(boxMat);
fillPoly(maskMat, matVector, new Scalar(1));
NDArray subBitMap = bitmap.get(ymin + ":" + (ymax + 1) + "," + xmin + ":" + (xmax + 1));
float[] subBitMapArr = subBitMap.toFloatArray();
rows = (int) subBitMap.getShape().get(0);
cols = (int) subBitMap.getShape().get(1);
Mat bitMapMat = new Mat(rows, cols, CvType.CV_32F);
FloatRawIndexer floatRawIndexer = bitMapMat.createIndexer();
for (int row = 0; row < rows; row++) {
for (int col = 0; col < cols; col++) {
floatRawIndexer.put(row, col, subBitMapArr[row * cols + col]);
}
}
floatRawIndexer.release();
Scalar score = org.bytedeco.opencv.global.opencv_core.mean(bitMapMat, maskMat);
float scoreValue = (float) score.get();
// release
maskMat.release();
boxMat.release();
bitMapMat.release();
matVector.releaseReference();
score.releaseReference();
return scoreValue;
}
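/**
* Pre-processing: cap the longest side at max_side_len, round width and height down to a
* multiple of 32 (as required by the detection model), remember the resize ratios, then
* normalize with ImageNet mean/std and add a batch dimension.
*/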
@Override
public NDList processInput(TranslatorContext ctx, Image input) {
NDArray img = input.toNDArray(ctx.getNDManager());
image = BufferedImageFactory.getInstance().fromNDArray(img);
int h = input.getHeight();
int w = input.getWidth();
img_height = h;
img_width = w;
int resize_w = w;
int resize_h = h;
// limit the max side
float ratio = 1.0f;
if (Math.max(resize_h, resize_w) > max_side_len) {
if (resize_h > resize_w) {
ratio = (float) max_side_len / (float) resize_h;
} else {
ratio = (float) max_side_len / (float) resize_w;
}
}
resize_h = (int) (resize_h * ratio);
resize_w = (int) (resize_w * ratio);
if (resize_h % 32 == 0) {
resize_h = resize_h;
} else if (Math.floor((float) resize_h / 32f) <= 1) {
resize_h = 32;
} else {
resize_h = (int) Math.floor((float) resize_h / 32f) * 32;
}
if (resize_w % 32 == 0) {
resize_w = resize_w;
} else if (Math.floor((float) resize_w / 32f) <= 1) {
resize_w = 32;
} else {
resize_w = (int) Math.floor((float) resize_w / 32f) * 32;
}
ratio_h = resize_h / (float) h;
ratio_w = resize_w / (float) w;
img = NDImageUtils.resize(img, resize_w, resize_h);
img = NDImageUtils.toTensor(img);
img =
NDImageUtils.normalize(
img,
new float[]{0.485f, 0.456f, 0.406f},
new float[]{0.229f, 0.224f, 0.225f});
img = img.expandDims(0);
return new NDList(img);
}
@Override
public Batchifier getBatchifier() {
return null;
}
}

@@ -1,34 +0,0 @@
package jnpf.ocr_sdk.utils.detection;
import ai.djl.modality.cv.Image;
import ai.djl.ndarray.NDList;
import ai.djl.repository.zoo.Criteria;
import ai.djl.training.util.ProgressBar;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.ConcurrentHashMap;
public final class OcrV3Detection {
private static final Logger logger = LoggerFactory.getLogger(OcrV3Detection.class);
public OcrV3Detection() {
}
public Criteria<Image, NDList> detectCriteria() {
Criteria<Image, NDList> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, NDList.class)
.optModelUrls(
"https://aias-home.oss-cn-beijing.aliyuncs.com/models/ocr_models/ch_PP-OCRv3_det_infer.zip")
// .optModelUrls(
// "/Users/calvin/Documents/build/paddle_models/ppocr/ch_PP-OCRv2_det_infer")
.optTranslator(new OCRDetectionTranslator(new ConcurrentHashMap<String, String>()))
.optProgress(new ProgressBar())
.build();
return criteria;
}
}

@@ -1,120 +0,0 @@
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package jnpf.ocr_sdk.utils.detection;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.util.NDImageUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.paddlepaddle.zoo.cv.objectdetection.BoundFinder;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.IntStream;
public class PpWordDetectionTranslator implements Translator<Image, DetectedObjects> {
private final int max_side_len;
public PpWordDetectionTranslator(Map<String, ?> arguments) {
max_side_len =
arguments.containsKey("maxLength")
? Integer.parseInt(arguments.get("maxLength").toString())
: 960;
}
@Override
public DetectedObjects processOutput(TranslatorContext ctx, NDList list) {
NDArray result = list.singletonOrThrow();
result = result.squeeze().mul(255f).toType(DataType.UINT8, true).gt(0.3); // thresh=0.3
boolean[] flattened = result.toBooleanArray();
Shape shape = result.getShape();
int w = (int) shape.get(0);
int h = (int) shape.get(1);
boolean[][] grid = new boolean[w][h];
IntStream.range(0, flattened.length)
.parallel()
.forEach(i -> grid[i / h][i % h] = flattened[i]);
List<BoundingBox> boxes = new BoundFinder(grid).getBoxes();
List<String> names = new ArrayList<>();
List<Double> probs = new ArrayList<>();
int boxSize = boxes.size();
for (int i = 0; i < boxSize; i++) {
names.add("word");
probs.add(1.0);
}
return new DetectedObjects(names, probs, boxes);
}
@Override
public NDList processInput(TranslatorContext ctx, Image input) {
NDArray img = input.toNDArray(ctx.getNDManager());
int h = input.getHeight();
int w = input.getWidth();
int resize_w = w;
int resize_h = h;
// limit the max side
float ratio = 1.0f;
if (Math.max(resize_h, resize_w) > max_side_len) {
if (resize_h > resize_w) {
ratio = (float) max_side_len / (float) resize_h;
} else {
ratio = (float) max_side_len / (float) resize_w;
}
}
resize_h = (int) (resize_h * ratio);
resize_w = (int) (resize_w * ratio);
if (resize_h % 32 == 0) {
resize_h = resize_h;
} else if (Math.floor((float) resize_h / 32f) <= 1) {
resize_h = 32;
} else {
resize_h = (int) Math.floor((float) resize_h / 32f) * 32;
}
if (resize_w % 32 == 0) {
resize_w = resize_w;
} else if (Math.floor((float) resize_w / 32f) <= 1) {
resize_w = 32;
} else {
resize_w = (int) Math.floor((float) resize_w / 32f) * 32;
}
img = NDImageUtils.resize(img, resize_w, resize_h);
img = NDImageUtils.toTensor(img);
img =
NDImageUtils.normalize(
img,
new float[]{0.485f, 0.456f, 0.406f},
new float[]{0.229f, 0.224f, 0.225f});
img = img.expandDims(0);
return new NDList(img);
}
@Override
public Batchifier getBatchifier() {
return null;
}
}

@@ -1,32 +0,0 @@
package jnpf.ocr_sdk.utils.layout;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.repository.zoo.Criteria;
import ai.djl.training.util.ProgressBar;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public final class LayoutDetection {
private static final Logger logger = LoggerFactory.getLogger(LayoutDetection.class);
public LayoutDetection() {}
public Criteria<Image, DetectedObjects> criteria() {
Criteria<Image, DetectedObjects> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, DetectedObjects.class)
.optModelUrls(
"https://aias-home.oss-cn-beijing.aliyuncs.com/models/ocr_models/ppyolov2_r50vd_dcn_365e_publaynet_infer.zip")
// .optModelUrls(
// "/Users/calvin/.paddledet/inference_model/ppyolov2_r50vd_dcn_365e_publaynet/ppyolov2_r50vd_dcn_365e_publaynet_infer")
.optTranslator(new LayoutDetectionTranslator())
.optProgress(new ProgressBar())
.build();
return criteria;
}
}

@@ -1,104 +0,0 @@
package jnpf.ocr_sdk.utils.layout;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.modality.cv.util.NDImageUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.DataType;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import java.util.ArrayList;
import java.util.List;
public class LayoutDetectionTranslator implements Translator<Image, DetectedObjects> {
private int width;
private int height;
public LayoutDetectionTranslator() {}
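/**
* Converts each row of the np_boxes output (clsid, score, x1, y1, x2, y2) into a DetectedObjects
* entry with a rectangle normalized by the original image size, skipping rows with
* score <= 0.5 or clsid <= -1.
*/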
@Override
public DetectedObjects processOutput(TranslatorContext ctx, NDList list) {
NDArray result = list.get(0); // np_boxes
long rows = result.size(0);
List<BoundingBox> boxes = new ArrayList<>();
List<String> names = new ArrayList<>();
List<Double> probs = new ArrayList<>();
for (long i = 0; i < rows; i++) {
NDArray row = result.get(i);
float[] array = row.toFloatArray();
if (array[1] <= 0.5 || array[0] <= -1) continue;
int clsid = (int) array[0];
double score = array[1];
String name = "";
switch (clsid) {
case 0:
name = "Text";
break;
case 1:
name = "Title";
break;
case 2:
name = "List";
break;
case 3:
name = "Table";
break;
case 4:
name = "Figure";
break;
default:
name = "Unknown";
}
float x = array[2] / width;
float y = array[3] / height;
float w = (array[4] - array[2]) / width;
float h = (array[5] - array[3]) / height;
Rectangle rect = new Rectangle(x, y, w, h);
boxes.add(rect);
names.add(name);
probs.add(score);
}
return new DetectedObjects(names, probs, boxes);
}
@Override
public NDList processInput(TranslatorContext ctx, Image input) {
NDArray img = input.toNDArray(ctx.getNDManager());
width = input.getWidth();
height = input.getHeight();
img = NDImageUtils.resize(img, 640, 640);
img = img.transpose(2, 0, 1).div(255);
img =
NDImageUtils.normalize(
img, new float[] {0.485f, 0.456f, 0.406f}, new float[] {0.229f, 0.224f, 0.225f});
img = img.expandDims(0);
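// besides the image, the exported PaddleDetection model takes im_shape and scale_factor, which map predictions back to the original resolution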
NDArray scale_factor = ctx.getNDManager().create(new float[] {640f / height, 640f / width});
scale_factor = scale_factor.toType(DataType.FLOAT32, false);
scale_factor = scale_factor.expandDims(0);
NDArray im_shape = ctx.getNDManager().create(new float[] {640f, 640f});
im_shape = im_shape.toType(DataType.FLOAT32, false);
im_shape = im_shape.expandDims(0);
// im_shape, image, scale_factor
return new NDList(im_shape, img, scale_factor);
}
@Override
public Batchifier getBatchifier() {
return null;
}
}

@ -1,64 +0,0 @@
package jnpf.ocr_sdk.utils.opencv;
import ai.djl.ndarray.NDArray;
import org.bytedeco.javacpp.indexer.DoubleRawIndexer;
import org.bytedeco.opencv.global.opencv_core;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Point2f;
import java.util.List;
public class NDArrayUtils {
// Convert an NDArray to an opencv_core.Mat
public static Mat toOpenCVMat(NDArray points, int rows, int cols) {
double[] doubleArray = points.toDoubleArray();
// CV_32F = FloatRawIndexer
// CV_64F = DoubleRawIndexer
Mat mat = new Mat(rows, cols, opencv_core.CV_64F);
DoubleRawIndexer ldIdx = mat.createIndexer();
for (int i = 0; i < rows; i++) {
for (int j = 0; j < cols; j++) {
ldIdx.put(i, j, doubleArray[i * cols + j]);
}
}
ldIdx.release();
return mat;
}
// Convert an NDArray to an opencv_core.Point2f
public static Point2f toOpenCVPoint2f(NDArray points, int rows) {
double[] doubleArray = points.toDoubleArray();
Point2f points2f = new Point2f(rows);
for (int i = 0; i < rows; i++) {
points2f.position(i).x((float) doubleArray[i * 2]).y((float) doubleArray[i * 2 + 1]);
}
return points2f;
}
// Convert a double array to an opencv_core.Point2f
public static Point2f toOpenCVPoint2f(double[] doubleArray, int rows) {
Point2f points2f = new Point2f(rows);
for (int i = 0; i < rows; i++) {
points2f.position(i).x((float) doubleArray[i * 2]).y((float) doubleArray[i * 2 + 1]);
}
return points2f;
}
// Convert a list of points to an opencv_core.Point2f
public static Point2f toOpenCVPoint2f(List<ai.djl.modality.cv.output.Point> points, int rows) {
Point2f points2f = new Point2f(points.size());
for (int i = 0; i < rows; i++) {
ai.djl.modality.cv.output.Point point = points.get(i);
points2f.position(i).x((float) point.getX()).y((float) point.getY());
}
return points2f;
}
}

@ -1,72 +0,0 @@
package jnpf.ocr_sdk.utils.opencv;
import org.bytedeco.opencv.global.opencv_imgproc;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Point2f;
import org.opencv.core.CvType;
import org.opencv.imgproc.Imgproc;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.awt.image.WritableRaster;
public class OpenCVUtils {
public static Mat perspectiveTransform(
Mat src, Point2f srcPoints, Point2f dstPoints) {
Mat dst = src.clone();
Mat warp_mat = opencv_imgproc.getPerspectiveTransform(srcPoints.position(0), dstPoints.position(0));
opencv_imgproc.warpPerspective(src, dst, warp_mat, dst.size());
warp_mat.release();
return dst;
}
/**
* Mat to BufferedImage
*
* @param mat
* @return
*/
public static BufferedImage mat2Image(org.opencv.core.Mat mat) {
int width = mat.width();
int height = mat.height();
byte[] data = new byte[width * height * (int) mat.elemSize()];
Imgproc.cvtColor(mat, mat, Imgproc.COLOR_BGR2RGB); // 4 = COLOR_BGR2RGB
mat.get(0, 0, data);
BufferedImage ret = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR); // 5 = TYPE_3BYTE_BGR
ret.getRaster().setDataElements(0, 0, width, height, data);
return ret;
}
public static BufferedImage matToBufferedImage(org.opencv.core.Mat frame) {
int type = 0;
if (frame.channels() == 1) {
type = BufferedImage.TYPE_BYTE_GRAY;
} else if (frame.channels() == 3) {
type = BufferedImage.TYPE_3BYTE_BGR;
}
BufferedImage image = new BufferedImage(frame.width(), frame.height(), type);
WritableRaster raster = image.getRaster();
DataBufferByte dataBuffer = (DataBufferByte) raster.getDataBuffer();
byte[] data = dataBuffer.getData();
frame.get(0, 0, data);
return image;
}
/**
* BufferedImage to Mat
*
* @param img
* @return
*/
public static org.opencv.core.Mat image2Mat(BufferedImage img) {
int width = img.getWidth();
int height = img.getHeight();
byte[] data = ((DataBufferByte) img.getRaster().getDataBuffer()).getData();
org.opencv.core.Mat mat = new org.opencv.core.Mat(height, width, CvType.CV_8UC3);
mat.put(0, 0, data);
return mat;
}
}

@ -1,129 +0,0 @@
package jnpf.ocr_sdk.utils.recognition;
import ai.djl.inference.Predictor;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.modality.cv.util.NDImageUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.paddlepaddle.zoo.cv.objectdetection.PpWordDetectionTranslator;
import ai.djl.repository.zoo.Criteria;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.TranslateException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
public final class OcrV3AlignedRecognition {
private static final Logger logger = LoggerFactory.getLogger(OcrV3AlignedRecognition.class);
public OcrV3AlignedRecognition() {
}
public DetectedObjects predict(
Image image, Predictor<Image, DetectedObjects> detector, Predictor<Image, String> recognizer)
throws TranslateException {
DetectedObjects detections = detector.predict(image);
List<DetectedObjects.DetectedObject> boxes = detections.items();
List<String> names = new ArrayList<>();
List<Double> prob = new ArrayList<>();
List<BoundingBox> rect = new ArrayList<>();
long timeInferStart = System.currentTimeMillis();
for (int i = 0; i < boxes.size(); i++) {
Image subImg = getSubImage(image, boxes.get(i).getBoundingBox());
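// crops much taller than wide are assumed to be vertical text; rotate them 90 degrees before recognition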
if (subImg.getHeight() * 1.0 / subImg.getWidth() > 1.5) {
subImg = rotateImg(subImg);
}
// ImageUtils.saveImage(subImg, i + ".png", "build/output");
String name = recognizer.predict(subImg);
names.add(name);
prob.add(-1.0);
rect.add(boxes.get(i).getBoundingBox());
}
long timeInferEnd = System.currentTimeMillis();
System.out.println("time: " + (timeInferEnd - timeInferStart));
DetectedObjects detectedObjects = new DetectedObjects(names, prob, rect);
return detectedObjects;
}
public Criteria<Image, DetectedObjects> detectCriteria() {
Criteria<Image, DetectedObjects> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, DetectedObjects.class)
.optModelUrls(
"https://aias-home.oss-cn-beijing.aliyuncs.com/models/ocr_models/ch_PP-OCRv3_det_infer.zip")
// .optModelUrls(
// "/Users/calvin/Documents/build/paddle_models/ppocr/ch_PP-OCRv2_det_infer")
.optTranslator(new PpWordDetectionTranslator(new ConcurrentHashMap<String, String>()))
.optProgress(new ProgressBar())
.build();
return criteria;
}
public Criteria<Image, String> recognizeCriteria() {
Criteria<Image, String> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, String.class)
.optModelUrls(
"https://aias-home.oss-cn-beijing.aliyuncs.com/models/ocr_models/ch_PP-OCRv3_rec_infer.zip")
.optProgress(new ProgressBar())
.optTranslator(new PpWordRecognitionTranslator((new ConcurrentHashMap<String, String>())))
.build();
return criteria;
}
private Image getSubImage(Image img, BoundingBox box) {
Rectangle rect = box.getBounds();
double[] extended = extendRect(rect.getX(), rect.getY(), rect.getWidth(), rect.getHeight());
int width = img.getWidth();
int height = img.getHeight();
int[] recovered = {
(int) (extended[0] * width),
(int) (extended[1] * height),
(int) (extended[2] * width),
(int) (extended[3] * height)
};
return img.getSubImage(recovered[0], recovered[1], recovered[2], recovered[3]);
}
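// extendRect pads the detected box around its center (the shorter side is tripled, the longer side grows by twice the shorter side) and clamps it to [0, 1]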
private double[] extendRect(double xmin, double ymin, double width, double height) {
double centerx = xmin + width / 2;
double centery = ymin + height / 2;
if (width > height) {
width += height * 2.0;
height *= 3.0;
} else {
height += width * 2.0;
width *= 3.0;
}
double newX = centerx - width / 2 < 0 ? 0 : centerx - width / 2;
double newY = centery - height / 2 < 0 ? 0 : centery - height / 2;
double newWidth = newX + width > 1 ? 1 - newX : width;
double newHeight = newY + height > 1 ? 1 - newY : height;
return new double[]{newX, newY, newWidth, newHeight};
}
private Image rotateImg(Image image) {
try (NDManager manager = NDManager.newBaseManager()) {
NDArray rotated = NDImageUtils.rotate90(image.toNDArray(manager), 1);
return ImageFactory.getInstance().fromNDArray(rotated);
}
}
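// Minimal usage sketch added for illustration (not part of the original file); the input
// path "pound_note.png" is a hypothetical example.
public static void main(String[] args) throws Exception {
OcrV3AlignedRecognition ocr = new OcrV3AlignedRecognition();
try (ai.djl.repository.zoo.ZooModel<Image, DetectedObjects> detModel = ocr.detectCriteria().loadModel();
Predictor<Image, DetectedObjects> detector = detModel.newPredictor();
ai.djl.repository.zoo.ZooModel<Image, String> recModel = ocr.recognizeCriteria().loadModel();
Predictor<Image, String> recognizer = recModel.newPredictor()) {
Image img = ImageFactory.getInstance().fromFile(java.nio.file.Paths.get("pound_note.png"));
DetectedObjects result = ocr.predict(img, detector, recognizer);
logger.info("{}", result);
}
}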
}

@ -1,194 +0,0 @@
package jnpf.ocr_sdk.utils.recognition;
import ai.djl.inference.Predictor;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.modality.cv.util.NDImageUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.paddlepaddle.zoo.cv.objectdetection.PpWordDetectionTranslator;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.TranslateException;
import jnpf.ocr_sdk.utils.common.ImageInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;
public final class OcrV3MultiThreadRecognition {
private static final Logger logger = LoggerFactory.getLogger(OcrV3MultiThreadRecognition.class);
public OcrV3MultiThreadRecognition() {
}
public DetectedObjects predict(
Image image, List<ZooModel> recModels, Predictor<Image, DetectedObjects> detector, int threadNum)
throws TranslateException {
DetectedObjects detections = detector.predict(image);
List<DetectedObjects.DetectedObject> boxes = detections.items();
ConcurrentLinkedQueue<ImageInfo> queue = new ConcurrentLinkedQueue<>();
for (int i = 0; i < boxes.size(); i++) {
BoundingBox box = boxes.get(i).getBoundingBox();
Image subImg = getSubImage(image, box);
if (subImg.getHeight() * 1.0 / subImg.getWidth() > 1.5) {
subImg = rotateImg(subImg);
}
ImageInfo imageInfo = new ImageInfo(subImg, box);
queue.add(imageInfo);
}
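// one recognition predictor per worker thread; each callable drains the shared queue of cropped text regions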
List<InferCallable> callables = new ArrayList<>(threadNum);
for (int i = 0; i < threadNum; i++) {
callables.add(new InferCallable(recModels.get(i), queue));
}
ExecutorService es = Executors.newFixedThreadPool(threadNum);
List<ImageInfo> resultList = new ArrayList<>();
try {
List<Future<List<ImageInfo>>> futures = new ArrayList<>();
long timeInferStart = System.currentTimeMillis();
for (InferCallable callable : callables) {
futures.add(es.submit(callable));
}
for (Future<List<ImageInfo>> future : futures) {
List<ImageInfo> subList = future.get();
if (subList != null) {
resultList.addAll(subList);
}
}
long timeInferEnd = System.currentTimeMillis();
System.out.println("time: " + (timeInferEnd - timeInferStart));
for (InferCallable callable : callables) {
callable.close();
}
} catch (InterruptedException | ExecutionException e) {
logger.error("", e);
} finally {
es.shutdown();
}
List<String> names = new ArrayList<>();
List<Double> prob = new ArrayList<>();
List<BoundingBox> rect = new ArrayList<>();
for (ImageInfo imageInfo : resultList) {
names.add(imageInfo.getName());
prob.add(imageInfo.getProb());
rect.add(imageInfo.getBox());
}
DetectedObjects detectedObjects = new DetectedObjects(names, prob, rect);
return detectedObjects;
}
public Criteria<Image, DetectedObjects> detectCriteria() {
Criteria<Image, DetectedObjects> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, DetectedObjects.class)
.optModelUrls(
"https://aias-home.oss-cn-beijing.aliyuncs.com/models/ocr_models/ch_PP-OCRv3_det_infer.zip")
// .optModelUrls(
// "/Users/calvin/Documents/build/paddle_models/ppocr/ch_PP-OCRv2_det_infer")
.optTranslator(new PpWordDetectionTranslator(new ConcurrentHashMap<String, String>()))
.optProgress(new ProgressBar())
.build();
return criteria;
}
public Criteria<Image, String> recognizeCriteria() {
Criteria<Image, String> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, String.class)
.optModelUrls(
"https://aias-home.oss-cn-beijing.aliyuncs.com/models/ocr_models/ch_PP-OCRv3_rec_infer.zip")
.optProgress(new ProgressBar())
.optTranslator(new PpWordRecognitionTranslator((new ConcurrentHashMap<String, String>())))
.build();
return criteria;
}
private static class InferCallable implements Callable<List<ImageInfo>> {
private Predictor<Image, String> recognizer;
private ConcurrentLinkedQueue<ImageInfo> queue;
private List<ImageInfo> resultList = new ArrayList<>();
public InferCallable(ZooModel recognitionModel, ConcurrentLinkedQueue<ImageInfo> queue){
recognizer = recognitionModel.newPredictor();
this.queue = queue;
}
public List<ImageInfo> call() {
try {
ImageInfo imageInfo = queue.poll();
while (imageInfo != null) {
String name = recognizer.predict(imageInfo.getImage());
imageInfo.setName(name);
imageInfo.setProb(-1.0);
resultList.add(imageInfo);
imageInfo = queue.poll();
}
} catch (Exception e) {
e.printStackTrace();
}
return resultList;
}
public void close() {
recognizer.close();
}
}
private Image getSubImage(Image img, BoundingBox box) {
Rectangle rect = box.getBounds();
double[] extended = extendRect(rect.getX(), rect.getY(), rect.getWidth(), rect.getHeight());
int width = img.getWidth();
int height = img.getHeight();
int[] recovered = {
(int) (extended[0] * width),
(int) (extended[1] * height),
(int) (extended[2] * width),
(int) (extended[3] * height)
};
return img.getSubImage(recovered[0], recovered[1], recovered[2], recovered[3]);
}
private double[] extendRect(double xmin, double ymin, double width, double height) {
double centerx = xmin + width / 2;
double centery = ymin + height / 2;
if (width > height) {
width += height * 2.0;
height *= 3.0;
} else {
height += width * 2.0;
width *= 3.0;
}
double newX = centerx - width / 2 < 0 ? 0 : centerx - width / 2;
double newY = centery - height / 2 < 0 ? 0 : centery - height / 2;
double newWidth = newX + width > 1 ? 1 - newX : width;
double newHeight = newY + height > 1 ? 1 - newY : height;
return new double[]{newX, newY, newWidth, newHeight};
}
private Image rotateImg(Image image) {
try (NDManager manager = NDManager.newBaseManager()) {
NDArray rotated = NDImageUtils.rotate90(image.toNDArray(manager), 1);
return ImageFactory.getInstance().fromNDArray(rotated);
}
}
}

@ -1,140 +0,0 @@
package jnpf.ocr_sdk.utils.recognition;
import ai.djl.inference.Predictor;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.Point;
import ai.djl.modality.cv.util.NDImageUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.opencv.OpenCVImageFactory;
import ai.djl.repository.zoo.Criteria;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.TranslateException;
import jnpf.ocr_sdk.utils.common.RotatedBox;
import jnpf.ocr_sdk.utils.opencv.NDArrayUtils;
import jnpf.ocr_sdk.utils.opencv.OpenCVUtils;
import org.bytedeco.javacv.Java2DFrameConverter;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.opencv.opencv_core.Point2f;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
public final class OcrV3Recognition {
private static final Logger logger = LoggerFactory.getLogger(OcrV3Recognition.class);
public OcrV3Recognition() {
}
public Criteria<Image, String> recognizeCriteria() {
Criteria<Image, String> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, String.class)
.optModelUrls(
"https://aias-home.oss-cn-beijing.aliyuncs.com/models/ocr_models/ch_PP-OCRv3_rec_infer.zip")
.optProgress(new ProgressBar())
.optTranslator(new PpWordRecognitionTranslator((new ConcurrentHashMap<String, String>())))
.build();
return criteria;
}
public List<RotatedBox> predict(
Image image, Predictor<Image, NDList> detector, Predictor<Image, String> recognizer)
throws TranslateException {
NDList boxes = detector.predict(image);
List<RotatedBox> result = new ArrayList<>();
long timeInferStart = System.currentTimeMillis();
OpenCVFrameConverter.ToMat cv = new OpenCVFrameConverter.ToMat();
OpenCVFrameConverter.ToMat converter1 = new OpenCVFrameConverter.ToMat();
OpenCVFrameConverter.ToOrgOpenCvCoreMat converter2 = new OpenCVFrameConverter.ToOrgOpenCvCoreMat();
for (int i = 0; i < boxes.size(); i++) {
NDArray box = boxes.get(i);
// BufferedImage bufferedImage = get_rotate_crop_image(image, box);
float[] pointsArr = box.toFloatArray();
float[] lt = java.util.Arrays.copyOfRange(pointsArr, 0, 2);
float[] rt = java.util.Arrays.copyOfRange(pointsArr, 2, 4);
float[] rb = java.util.Arrays.copyOfRange(pointsArr, 4, 6);
float[] lb = java.util.Arrays.copyOfRange(pointsArr, 6, 8);
int img_crop_width = (int) Math.max(distance(lt, rt), distance(rb, lb));
int img_crop_height = (int) Math.max(distance(lt, lb), distance(rt, rb));
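// perspective-warp the quadrilateral text region into an axis-aligned crop before recognition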
List<Point> srcPoints = new ArrayList<>();
srcPoints.add(new Point(lt[0], lt[1]));
srcPoints.add(new Point(rt[0], rt[1]));
srcPoints.add(new Point(rb[0], rb[1]));
srcPoints.add(new Point(lb[0], lb[1]));
List<Point> dstPoints = new ArrayList<>();
dstPoints.add(new Point(0, 0));
dstPoints.add(new Point(img_crop_width, 0));
dstPoints.add(new Point(img_crop_width, img_crop_height));
dstPoints.add(new Point(0, img_crop_height));
Point2f srcPoint2f = NDArrayUtils.toOpenCVPoint2f(srcPoints, 4);
Point2f dstPoint2f = NDArrayUtils.toOpenCVPoint2f(dstPoints, 4);
BufferedImage bufferedImage = OpenCVUtils.matToBufferedImage((org.opencv.core.Mat) image.getWrappedImage());
// try {
// File outputfile = new File("build/output/srcImage.jpg");
// ImageIO.write(bufferedImage, "jpg", outputfile);
// } catch (IOException e) {
// e.printStackTrace();
// }
org.bytedeco.opencv.opencv_core.Mat mat = cv.convertToMat(new Java2DFrameConverter().convert(bufferedImage));
org.bytedeco.opencv.opencv_core.Mat dstMat = OpenCVUtils.perspectiveTransform(mat, srcPoint2f, dstPoint2f);
org.opencv.core.Mat cvMat = converter2.convert(converter1.convert(dstMat));
Image subImg = OpenCVImageFactory.getInstance().fromImage(cvMat);
// ImageUtils.saveImage(subImg, i + ".png", "build/output");
subImg = subImg.getSubImage(0,0,img_crop_width,img_crop_height);
if (subImg.getHeight() * 1.0 / subImg.getWidth() > 1.5) {
subImg = rotateImg(subImg);
}
String name = recognizer.predict(subImg);
RotatedBox rotatedBox = new RotatedBox(box, name);
result.add(rotatedBox);
mat.release();
dstMat.release();
cvMat.release();
srcPoint2f.releaseReference();
dstPoint2f.releaseReference();
}
cv.close();
converter1.close();
converter2.close();
long timeInferEnd = System.currentTimeMillis();
System.out.println("time: " + (timeInferEnd - timeInferStart));
return result;
}
private BufferedImage get_rotate_crop_image(Image image, NDArray box) {
return null;
}
private float distance(float[] point1, float[] point2) {
float disX = point1[0] - point2[0];
float disY = point1[1] - point2[1];
float dis = (float) Math.sqrt(disX * disX + disY * disY);
return dis;
}
private Image rotateImg(Image image) {
try (NDManager manager = NDManager.newBaseManager()) {
NDArray rotated = NDImageUtils.rotate90(image.toNDArray(manager), 1);
return ImageFactory.getInstance().fromNDArray(rotated);
}
}
}

@ -1,108 +0,0 @@
package jnpf.ocr_sdk.utils.recognition;
import ai.djl.Model;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.util.NDImageUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.index.NDIndex;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import ai.djl.util.Utils;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
public class PpWordRecognitionTranslator implements Translator<Image, String> {
private List<String> table;
private final boolean use_space_char;
public PpWordRecognitionTranslator(Map<String, ?> arguments) {
use_space_char =
arguments.containsKey("use_space_char")
? Boolean.parseBoolean(arguments.get("use_space_char").toString())
: false;
}
@Override
public void prepare(TranslatorContext ctx) throws IOException {
Model model = ctx.getModel();
try (InputStream is = model.getArtifact("ppocr_keys_v1.txt").openStream()) {
table = Utils.readLines(is, true);
table.add(0, "blank");
if(use_space_char)
table.add(" ");
else
table.add("");
}
}
@Override
public String processOutput(TranslatorContext ctx, NDList list) throws IOException {
StringBuilder sb = new StringBuilder();
NDArray tokens = list.singletonOrThrow();
long[] indices = tokens.get(0).argMax(1).toLongArray();
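// greedy decoding: take the argmax label at every time step, then collapse repeated labels below (CTC-style)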
boolean[] selection = new boolean[indices.length];
Arrays.fill(selection, true);
for (int i = 1; i < indices.length; i++) {
if (indices[i] == indices[i - 1]) {
selection[i] = false;
}
}
// per-character confidence scores
// float[] probs = new float[indices.length];
// for (int row = 0; row < indices.length; row++) {
// NDArray value = tokens.get(0).get(new NDIndex(""+ row +":" + (row + 1) +"," + indices[row] +":" + ( indices[row] + 1)));
// probs[row] = value.toFloatArray()[0];
// }
int lastIdx = 0;
for (int i = 0; i < indices.length; i++) {
if (selection[i] && indices[i] > 0 && !(i > 0 && indices[i] == lastIdx)) {
sb.append(table.get((int) indices[i]));
}
}
return sb.toString();
}
@Override
public NDList processInput(TranslatorContext ctx, Image input) {
NDArray img = input.toNDArray(ctx.getNDManager(), Image.Flag.COLOR);
int imgC = 3;
int imgH = 48;
int imgW = 320; // default width; recomputed from the aspect ratio below
int h = input.getHeight();
int w = input.getWidth();
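// keep the aspect ratio: resize to height 48, then zero-pad on the right up to the target width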
float ratio = (float) w / (float) h;
imgW = (int)(imgH * ratio);
int resized_w;
if (Math.ceil(imgH * ratio) > imgW) {
resized_w = imgW;
} else {
resized_w = (int) (Math.ceil(imgH * ratio));
}
img = NDImageUtils.resize(img, resized_w, imgH);
img = img.transpose(2, 0, 1).div(255).sub(0.5f).div(0.5f);
NDArray padding_im = ctx.getNDManager().zeros(new Shape(imgC, imgH, imgW), DataType.FLOAT32);
padding_im.set(new NDIndex(":,:,0:" + resized_w), img);
padding_im = padding_im.expandDims(0);
return new NDList(padding_im);
}
@Override
public Batchifier getBatchifier() {
return null;
}
}

@ -1,69 +0,0 @@
package jnpf.ocr_sdk.utils.rotation;
import ai.djl.modality.Classifications;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.util.NDImageUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.index.NDIndex;
import ai.djl.ndarray.types.Shape;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import java.util.Arrays;
import java.util.List;
public class PpWordRotateTranslator implements Translator<Image, Classifications> {
List<String> classes = Arrays.asList("No Rotate", "Rotate");
public PpWordRotateTranslator() {}
public Classifications processOutput(TranslatorContext ctx, NDList list) {
NDArray prob = list.singletonOrThrow();
return new Classifications(this.classes, prob);
}
public NDList processInput(TranslatorContext ctx, Image input) throws Exception {
NDArray img = input.toNDArray(ctx.getNDManager());
img = NDImageUtils.resize(img, 192, 48);
img = NDImageUtils.toTensor(img).sub(0.5F).div(0.5F);
img = img.expandDims(0);
return new NDList(new NDArray[]{img});
}
public NDList processInputBak(TranslatorContext ctx, Image input) throws Exception {
NDArray img = input.toNDArray(ctx.getNDManager());
int imgC = 3;
int imgH = 48;
int imgW = 192;
NDArray array = ctx.getNDManager().zeros(new Shape(imgC, imgH, imgW));
int h = input.getHeight();
int w = input.getWidth();
int resized_w = 0;
float ratio = (float) w / (float) h;
if (Math.ceil(imgH * ratio) > imgW) {
resized_w = imgW;
} else {
resized_w = (int) (Math.ceil(imgH * ratio));
}
img = NDImageUtils.resize(img, resized_w, imgH);
img = NDImageUtils.toTensor(img).sub(0.5F).div(0.5F);
// img = img.transpose(2, 0, 1);
array.set(new NDIndex(":,:,0:" + resized_w), img);
array = array.expandDims(0);
return new NDList(new NDArray[] {array});
}
public Batchifier getBatchifier() {
return null;
}
}

@ -1,235 +0,0 @@
package jnpf.ocr_sdk.utils.table;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.poi.hssf.usermodel.*;
import org.apache.poi.ss.usermodel.BorderStyle;
import org.apache.poi.ss.usermodel.CellType;
import org.apache.poi.ss.usermodel.HorizontalAlignment;
import org.apache.poi.ss.usermodel.VerticalAlignment;
import org.apache.poi.ss.util.CellRangeAddress;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import java.util.ArrayList;
import java.util.List;
/**
* @Author: xiaoqiang
* @Date: 2020/12/9 9:16
* @Description: Convert an HTML table into an HSSF (.xls) Excel workbook.
*/
public class ConvertHtml2Excel {
/**
* Convert an HTML table to an Excel workbook.
*
* @param tableHtml the HTML fragment, i.e. a <table>..</table> element
* @return the generated HSSFWorkbook
*/
public static HSSFWorkbook table2Excel(String tableHtml) {
HSSFWorkbook wb = new HSSFWorkbook();
HSSFSheet sheet = wb.createSheet();
List<CrossRangeCellMeta> crossRowEleMetaLs = new ArrayList<>();
int rowIndex = 0;
try {
Document data = DocumentHelper.parseText(tableHtml);
// build the table header
Element thead = data.getRootElement().element("thead");
HSSFCellStyle titleStyle = getTitleStyle(wb);
int ls = 0; // number of columns
if (thead != null) {
List<Element> trLs = thead.elements("tr");
for (Element trEle : trLs) {
HSSFRow row = sheet.createRow(rowIndex);
List<Element> thLs = trEle.elements("td");
ls=thLs.size();
makeRowCell(thLs, rowIndex, row, 0, titleStyle, crossRowEleMetaLs);
rowIndex++;
}
}
// build the table body
Element tbody = data.getRootElement().element("tbody");
HSSFCellStyle contentStyle = getContentStyle(wb);
if (tbody != null) {
List<Element> trLs = tbody.elements("tr");
for (Element trEle : trLs) {
HSSFRow row = sheet.createRow(rowIndex);
List<Element> thLs = trEle.elements("th");
int cellIndex = makeRowCell(thLs, rowIndex, row, 0, titleStyle, crossRowEleMetaLs);
List<Element> tdLs = trEle.elements("td");
makeRowCell(tdLs, rowIndex, row, cellIndex, contentStyle, crossRowEleMetaLs);
rowIndex++;
}
}
// merge the spanned cell regions
for (CrossRangeCellMeta crcm : crossRowEleMetaLs) {
sheet.addMergedRegion(new CellRangeAddress(crcm.getFirstRow(), crcm.getLastRow(), crcm.getFirstCol(), crcm.getLastCol()));
setRegionStyle(sheet, new CellRangeAddress(crcm.getFirstRow(), crcm.getLastRow(), crcm.getFirstCol(), crcm.getLastCol()),titleStyle);
}
for(int i=0;i<sheet.getRow(0).getPhysicalNumberOfCells();i++){
sheet.autoSizeColumn(i, true); // set the column width
if(sheet.getColumnWidth(i)<255*256){
sheet.setColumnWidth(i, sheet.getColumnWidth(i) < 9000 ? 9000 : sheet.getColumnWidth(i));
}else{
sheet.setColumnWidth(i, 15000);
}
}
} catch (DocumentException e) {
e.printStackTrace();
}
return wb;
}
/**
* Create the cells of one row.
*
* @param tdLs the th/td elements of the row
* @param rowIndex the current row index
* @param row the POI row being filled
* @param startCellIndex the index of the first cell to create
* @param cellStyle the style applied to the created cells
* @param crossRowEleMetaLs accumulated metadata of cells that span rows or columns
* @return the cell index after the last created cell
*/
private static int makeRowCell(List<Element> tdLs, int rowIndex, HSSFRow row, int startCellIndex, HSSFCellStyle cellStyle,
List<CrossRangeCellMeta> crossRowEleMetaLs) {
int i = startCellIndex;
for (int eleIndex = 0; eleIndex < tdLs.size(); i++, eleIndex++) {
int captureCellSize = getCaptureCellSize(rowIndex, i, crossRowEleMetaLs);
while (captureCellSize > 0) {
for (int j = 0; j < captureCellSize; j++) { // fill placeholder cells occupied by a span from an earlier row
row.createCell(i);
i++;
}
captureCellSize = getCaptureCellSize(rowIndex, i, crossRowEleMetaLs);
}
Element thEle = tdLs.get(eleIndex);
String val = thEle.getTextTrim();
if (StringUtils.isBlank(val)) {
Element e = thEle.element("a");
if (e != null) {
val = e.getTextTrim();
}
}
HSSFCell c = row.createCell(i);
if (NumberUtils.isNumber(val)) {
c.setCellValue(Double.parseDouble(val));
c.setCellType(CellType.NUMERIC);
} else {
c.setCellValue(val);
}
int rowSpan = NumberUtils.toInt(thEle.attributeValue("rowspan"), 1);
int colSpan = NumberUtils.toInt(thEle.attributeValue("colspan"), 1);
c.setCellStyle(cellStyle);
if (rowSpan > 1 || colSpan > 1) { // the cell spans multiple rows or columns
crossRowEleMetaLs.add(new CrossRangeCellMeta(rowIndex, i, rowSpan, colSpan));
}
if (colSpan > 1) { // fill placeholder cells for the columns spanned in the current row
for (int j = 1; j < colSpan; j++) {
i++;
row.createCell(i);
}
}
}
return i;
}
/**
* Apply a cell style to every cell of a merged region.
*
* @param sheet the sheet containing the region
* @param region the merged region
* @param cs the style to apply
*/
public static void setRegionStyle(HSSFSheet sheet, CellRangeAddress region, HSSFCellStyle cs) {
for (int i = region.getFirstRow(); i <= region.getLastRow(); i++) {
HSSFRow row = sheet.getRow(i);
for (int j = region.getFirstColumn(); j <= region.getLastColumn(); j++) {
HSSFCell cell = row.getCell(j);
cell.setCellStyle(cs);
}
}
}
/**
* Number of columns at (rowIndex, colIndex) already occupied by a cell spanning down from an earlier row.
*
* @param rowIndex current row index
* @param colIndex current column index
* @param crossRowEleMetaLs metadata of spanning cells collected so far
* @return the number of occupied columns, or 0 if the position is free
*/
private static int getCaptureCellSize(int rowIndex, int colIndex, List<CrossRangeCellMeta> crossRowEleMetaLs) {
int captureCellSize = 0;
for (CrossRangeCellMeta crossRangeCellMeta : crossRowEleMetaLs) {
if (crossRangeCellMeta.getFirstRow() < rowIndex && crossRangeCellMeta.getLastRow() >= rowIndex) {
if (crossRangeCellMeta.getFirstCol() <= colIndex && crossRangeCellMeta.getLastCol() >= colIndex) {
captureCellSize = crossRangeCellMeta.getLastCol() - colIndex + 1;
}
}
}
return captureCellSize;
}
/**
* Cell style used for header (title) cells.
*
* @param workbook the target workbook
* @return the header style
*/
private static HSSFCellStyle getTitleStyle(HSSFWorkbook workbook) {
//short titlebackgroundcolor = IndexedColors.GREY_25_PERCENT.index;
short fontSize = 12;
String fontName = "宋体";
HSSFCellStyle style = workbook.createCellStyle();
style.setVerticalAlignment(VerticalAlignment.CENTER);
style.setAlignment(HorizontalAlignment.CENTER);
style.setBorderBottom(BorderStyle.THIN); // bottom border
style.setBorderLeft(BorderStyle.THIN); // left border
style.setBorderTop(BorderStyle.THIN); // top border
style.setBorderRight(BorderStyle.THIN); // right border
//style.setFillPattern(FillPatternType.SOLID_FOREGROUND);
//style.setFillForegroundColor(titlebackgroundcolor); // background color
HSSFFont font = workbook.createFont();
font.setFontName(fontName);
font.setFontHeightInPoints(fontSize);
font.setBold(true);
style.setFont(font);
return style;
}
/**
* Cell style used for body (content) cells.
*
* @param wb the target workbook
* @return the content style
*/
private static HSSFCellStyle getContentStyle(HSSFWorkbook wb) {
short fontSize = 12;
String fontName = "宋体";
HSSFCellStyle style = wb.createCellStyle();
style.setBorderBottom(BorderStyle.THIN); // bottom border
style.setBorderLeft(BorderStyle.THIN); // left border
style.setBorderTop(BorderStyle.THIN); // top border
style.setBorderRight(BorderStyle.THIN); // right border
HSSFFont font = wb.createFont();
font.setFontName(fontName);
font.setFontHeightInPoints(fontSize);
style.setFont(font);
style.setAlignment(HorizontalAlignment.CENTER); // center horizontally
style.setVerticalAlignment(VerticalAlignment.CENTER); // center vertically
style.setWrapText(true);
return style;
}
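// Minimal usage sketch added for illustration (not part of the original file); the table HTML
// and the output path are hypothetical examples.
public static void main(String[] args) throws java.io.IOException {
String html = "<table><thead><tr><td>name</td><td>weight</td></tr></thead>"
+ "<tbody><tr><td>coal</td><td>35.5</td></tr></tbody></table>";
HSSFWorkbook workbook = table2Excel(html);
try (java.io.FileOutputStream out = new java.io.FileOutputStream("table.xls")) {
workbook.write(out);
}
}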
}

@ -1,42 +0,0 @@
package jnpf.ocr_sdk.utils.table;
/**
* @Author: xiaoqiang
* @Date: 2020/12/9 9:17
* @Description: Metadata describing a cell region that spans multiple rows and/or columns.
*/
public class CrossRangeCellMeta {
public CrossRangeCellMeta(int firstRowIndex, int firstColIndex, int rowSpan, int colSpan) {
super();
this.firstRowIndex = firstRowIndex;
this.firstColIndex = firstColIndex;
this.rowSpan = rowSpan;
this.colSpan = colSpan;
}
private int firstRowIndex;
private int firstColIndex;
private int rowSpan; // number of rows spanned
private int colSpan; // number of columns spanned
public int getFirstRow() {
return firstRowIndex;
}
public int getLastRow() {
return firstRowIndex + rowSpan - 1;
}
public int getFirstCol() {
return firstColIndex;
}
public int getLastCol() {
return firstColIndex + colSpan - 1;
}
public int getColSpan(){
return colSpan;
}
}

@ -1,227 +0,0 @@
package jnpf.ocr_sdk.utils.table;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.repository.zoo.Criteria;
import ai.djl.training.util.ProgressBar;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
public final class TableDetection {
private static final Logger logger = LoggerFactory.getLogger(TableDetection.class);
public TableDetection() {}
public Criteria<Image, TableResult> criteria() {
Criteria<Image, TableResult> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, TableResult.class)
.optModelUrls(
"https://aias-home.oss-cn-beijing.aliyuncs.com/models/ocr_models/en_table.zip")
// .optModelUrls(
// "/Users/calvin/Documents/build/paddle_models/ppocr/en_ppocr_mobile_v2.0_table_structure_infer")
.optOption("removePass", "repeated_fc_relu_fuse_pass")
.optTranslator(new TableStructTranslator(new ConcurrentHashMap<String, String>()))
.optProgress(new ProgressBar())
.build();
return criteria;
}
public List<String> cellContents(
DetectedObjects textDetections, List<BoundingBox> cells, int width, int height) {
List<DetectedObjects.DetectedObject> dt_boxes = textDetections.items();
// Map each table cell to the text-detection boxes that fall inside it (1:N).
Map<Integer, List<Integer>> matched = new ConcurrentHashMap<>();
for (int i = 0; i < dt_boxes.size(); i++) {
DetectedObjects.DetectedObject item = dt_boxes.get(i);
Rectangle textBounds = item.getBoundingBox().getBounds();
int[] box_1 = rectXYXY(textBounds, width, height);
// compute the L1 distance and (1 - IoU) between this text box and every cell
List<Pair<Float, Float>> distances = new ArrayList<>();
for (BoundingBox cell : cells) {
Rectangle cellBounds = cell.getBounds();
int[] box_2 = rectXYXY(cellBounds, width, height);
float distance = distance(box_1, box_2);
float iou = 1 - compute_iou(box_1, box_2);
distances.add(Pair.of(distance, iou));
}
// pick the "nearest" cell by distance and IoU
Pair<Float, Float> nearest = sorted(distances);
// the index of the minimum pair is also the cell index, because distances follows the iteration order of cells
int id = 0;
for (int idx = 0; idx < distances.size(); idx++) {
Pair<Float, Float> current = distances.get(idx);
if (current.getLeft().floatValue() == nearest.getLeft().floatValue()
&& current.getRight().floatValue() == nearest.getRight().floatValue()) {
id = idx;
break;
}
}
if (!matched.containsKey(id)) {
List<Integer> textIds = new ArrayList<>();
textIds.add(i);
// cell id, text id list (dt_boxes index list)
matched.put(id, textIds);
} else {
matched.get(id).add(i);
}
}
List<String> cell_contents = new ArrayList<>();
List<Double> probs = new ArrayList<>();
for (int i = 0; i < cells.size(); i++) {
List<Integer> textIds = matched.get(i);
List<String> contents = new ArrayList<>();
String content = "";
if (textIds != null) {
for (Integer id : textIds) {
DetectedObjects.DetectedObject item = dt_boxes.get(id);
contents.add(item.getClassName());
}
content = StringUtils.join(contents, " ");
}
cell_contents.add(content);
probs.add(-1.0);
}
return cell_contents;
}
/**
* Calculate L1 distance
*
* @param box_1
* @param box_2
* @return
*/
private int distance(int[] box_1, int[] box_2) {
int x1 = box_1[0];
int y1 = box_1[1];
int x2 = box_1[2];
int y2 = box_1[3];
int x3 = box_2[0];
int y3 = box_2[1];
int x4 = box_2[2];
int y4 = box_2[3];
int dis = Math.abs(x3 - x1) + Math.abs(y3 - y1) + Math.abs(x4 - x2) + Math.abs(y4 - y2);
int dis_2 = Math.abs(x3 - x1) + Math.abs(y3 - y1);
int dis_3 = Math.abs(x4 - x2) + Math.abs(y4 - y2);
return dis + Math.min(dis_2, dis_3);
}
/**
* Get absolute pixel coordinates
*
* @param rect
* @param width
* @param height
* @return
*/
private int[] rectXYXY(Rectangle rect, int width, int height) {
int left = Math.max((int) (width * rect.getX()), 0);
int top = Math.max((int) (height * rect.getY()), 0);
int right = Math.min((int) (width * (rect.getX() + rect.getWidth())), width - 1);
int bottom = Math.min((int) (height * (rect.getY() + rect.getHeight())), height - 1);
return new int[] {left, top, right, bottom};
}
/**
* computing IoU
*
* @param rec1: (y0, x0, y1, x1), which reflects (top, left, bottom, right)
* @param rec2: (y0, x0, y1, x1)
* @return scalar IoU value (1.0 for identical boxes, 0.0 when they do not overlap)
*/
private float compute_iou(int[] rec1, int[] rec2) {
// computing area of each rectangles
int S_rec1 = (rec1[2] - rec1[0]) * (rec1[3] - rec1[1]);
int S_rec2 = (rec2[2] - rec2[0]) * (rec2[3] - rec2[1]);
// computing the sum_area
int sum_area = S_rec1 + S_rec2;
// find the each edge of intersect rectangle
int left_line = Math.max(rec1[1], rec2[1]);
int right_line = Math.min(rec1[3], rec2[3]);
int top_line = Math.max(rec1[0], rec2[0]);
int bottom_line = Math.min(rec1[2], rec2[2]);
// judge if there is an intersect
if (left_line >= right_line || top_line >= bottom_line) {
return 0.0f;
} else {
float intersect = (right_line - left_line) * (bottom_line - top_line);
return (intersect / (sum_area - intersect)) * 1.0f;
}
}
/**
* Sort the (L1 distance, 1 - IoU) pairs and return the smallest one.
*
* @param distances one pair per candidate cell
* @return the nearest pair
*/
private Pair<Float, Float> sorted(List<Pair<Float, Float>> distances) {
Comparator<Pair<Float, Float>> comparator =
new Comparator<Pair<Float, Float>>() {
@Override
public int compare(Pair<Float, Float> a1, Pair<Float, Float> a2) {
// sort by IoU first
if (a1.getRight().floatValue() > a2.getRight().floatValue()) {
return 1;
} else if (a1.getRight().floatValue() == a2.getRight().floatValue()) {
// then by L1 distance
if (a1.getLeft().floatValue() > a2.getLeft().floatValue()) {
return 1;
}
return -1;
}
return -1;
}
};
// sort by distance and take the minimum
List<Pair<Float, Float>> newDistances = new ArrayList<>();
CollectionUtils.addAll(newDistances, new Object[distances.size()]);
Collections.copy(newDistances, distances);
Collections.sort(newDistances, comparator);
return newDistances.get(0);
}
/**
* Generate table html
*
* @param pred_structures
* @param cell_contents
* @return
*/
public String get_pred_html(List<String> pred_structures, List<String> cell_contents) {
StringBuffer html = new StringBuffer();
int td_index = 0;
for (String tag : pred_structures) {
if (tag.contains("</td>")) {
String content = cell_contents.get(td_index);
html.append(content);
td_index++;
}
html.append(tag);
}
return html.toString();
}
}

@ -1,31 +0,0 @@
package jnpf.ocr_sdk.utils.table;
import ai.djl.modality.cv.output.BoundingBox;
import java.util.List;
public class TableResult {
private List<String> structure_str_list;
private List<BoundingBox> boxes;
public TableResult(List<String> structure_str_list, List<BoundingBox> boxes) {
this.structure_str_list = structure_str_list;
this.boxes = boxes;
}
public List<String> getStructure_str_list() {
return structure_str_list;
}
public void setStructure_str_list(List<String> structure_str_list) {
this.structure_str_list = structure_str_list;
}
public List<BoundingBox> getBoxes() {
return boxes;
}
public void setBoxes(List<BoundingBox> boxes) {
this.boxes = boxes;
}
}

@ -1,246 +0,0 @@
package jnpf.ocr_sdk.utils.table;
import ai.djl.Model;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.modality.cv.util.NDImageUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.index.NDIndex;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import ai.djl.util.Utils;
import org.apache.commons.lang3.StringUtils;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
public class TableStructTranslator implements Translator<Image, TableResult> {
private final int maxLength;
private int height;
private int width;
private float xScale = 1.0f;
private float yScale = 1.0f;
public TableStructTranslator(Map<String, ?> arguments) {
maxLength =
arguments.containsKey("maxLength")
? Integer.parseInt(arguments.get("maxLength").toString())
: 488;
}
private Map<String, String> dict_idx_character = new ConcurrentHashMap<>();
private Map<String, String> dict_character_idx = new ConcurrentHashMap<>();
private Map<String, String> dict_idx_elem = new ConcurrentHashMap<>();
private Map<String, String> dict_elem_idx = new ConcurrentHashMap<>();
private String beg_str = "sos";
private String end_str = "eos";
@Override
public void prepare(TranslatorContext ctx) throws IOException {
Model model = ctx.getModel();
// table_structure_dict.txt: the first line holds the vocabulary sizes, followed by the character and structure-element entries
try (InputStream is = model.getArtifact("table_structure_dict.txt").openStream()) {
List<String> lines = Utils.readLines(is, false);
String[] substr = lines.get(0).trim().split("\\t");
int characterNum = Integer.parseInt(substr[0]);
int elemNum = Integer.parseInt(substr[1]);
List<String> listCharacter = new ArrayList<>();
List<String> listElem = new ArrayList<>();
for (int i = 1; i < 1 + characterNum; i++) {
listCharacter.add(lines.get(i).trim());
}
for (int i = 1 + characterNum; i < 1 + characterNum + elemNum; i++) {
listElem.add(lines.get(i).trim());
}
listCharacter.add(0, beg_str);
listCharacter.add(end_str);
listElem.add(0, beg_str);
listElem.add(end_str);
for (int i = 0; i < listCharacter.size(); i++) {
dict_idx_character.put("" + i, listCharacter.get(i));
dict_character_idx.put(listCharacter.get(i), "" + i);
}
for (int i = 0; i < listElem.size(); i++) {
dict_idx_elem.put("" + i, listElem.get(i));
dict_elem_idx.put(listElem.get(i), "" + i);
}
}
}
@Override
public NDList processInput(TranslatorContext ctx, Image input) {
NDArray img = input.toNDArray(ctx.getNDManager(), Image.Flag.COLOR);
height = input.getHeight();
width = input.getWidth();
// img = ResizeTableImage(img, height, width, maxLength);
// img = PaddingTableImage(ctx, img, maxLength);
img = NDImageUtils.resize(img, 488, 488);
// img = NDImageUtils.toTensor(img);
img = img.transpose(2, 0, 1).div(255).flip(0);
img =
NDImageUtils.normalize(
img, new float[] {0.485f, 0.456f, 0.406f}, new float[] {0.229f, 0.224f, 0.225f});
img = img.expandDims(0);
return new NDList(img);
}
@Override
public TableResult processOutput(TranslatorContext ctx, NDList list) {
NDArray locPreds = list.get(0);
NDArray structureProbs = list.get(1);
NDArray structure_idx = structureProbs.argMax(2);
NDArray structure_probs = structureProbs.max(new int[] {2});
List<List<String>> result_list = new ArrayList<>();
List<List<String>> result_pos_list = new ArrayList<>();
List<List<String>> result_score_list = new ArrayList<>();
List<List<String>> result_elem_idx_list = new ArrayList<>();
List<String> res_html_code_list = new ArrayList<>();
List<NDArray> res_loc_list = new ArrayList<>();
// get ignored tokens
int beg_idx = Integer.parseInt(dict_elem_idx.get(beg_str));
int end_idx = Integer.parseInt(dict_elem_idx.get(end_str));
long batch_size = structure_idx.size(0); // len(text_index)
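// decode the structure tokens per batch item: argmax index at each step, stop at the end token, skip begin/end tokens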
for (int batch_idx = 0; batch_idx < batch_size; batch_idx++) {
List<String> char_list = new ArrayList<>();
List<String> elem_pos_list = new ArrayList<>();
List<String> elem_idx_list = new ArrayList<>();
List<String> score_list = new ArrayList<>();
long len = structure_idx.get(batch_idx).size();
for (int idx = 0; idx < len; idx++) {
int tmp_elem_idx = (int) structure_idx.get(batch_idx).get(idx).toLongArray()[0];
if (idx > 0 && tmp_elem_idx == end_idx) {
break;
}
if (tmp_elem_idx == beg_idx || tmp_elem_idx == end_idx) {
continue;
}
char_list.add(dict_idx_elem.get("" + tmp_elem_idx));
elem_pos_list.add("" + idx);
score_list.add("" + structure_probs.get(batch_idx, idx).toFloatArray()[0]);
elem_idx_list.add("" + tmp_elem_idx);
}
result_list.add(char_list); // structure_str
result_pos_list.add(elem_pos_list);
result_score_list.add(score_list);
result_elem_idx_list.add(elem_idx_list);
}
int batch_num = result_list.size();
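// for every "<td>" structure token, collect the corresponding predicted cell location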
for (int bno = 0; bno < batch_num; bno++) {
NDList res_loc = new NDList();
int len = result_list.get(bno).size();
for (int sno = 0; sno < len; sno++) {
String text = result_list.get(bno).get(sno);
if (text.equals("<td>") || text.equals("<td")) {
int pos = Integer.parseInt(result_pos_list.get(bno).get(sno));
res_loc.add(locPreds.get(bno, pos));
}
}
String res_html_code = StringUtils.join(result_list.get(bno), "");
res_html_code_list.add(res_html_code);
NDArray array = NDArrays.stack(res_loc);
res_loc_list.add(array);
}
// only the first batch item is used below: result_list -> structure_str_list, res_loc_list -> cell boxes
List<BoundingBox> boxes = new ArrayList<>();
long rows = res_loc_list.get(0).size(0);
for (int rno = 0; rno < rows; rno++) {
float[] arr = res_loc_list.get(0).get(rno).toFloatArray();
Rectangle rect = new Rectangle(arr[0], arr[1], (arr[2] - arr[0]), (arr[3] - arr[1]));
boxes.add(rect);
}
List<String> structure_str_list = result_list.get(0);
structure_str_list.add(0, "<table>");
structure_str_list.add(0, "<body>");
structure_str_list.add(0, "<html>");
structure_str_list.add("</table>");
structure_str_list.add("</body>");
structure_str_list.add("</html>");
TableResult result = new TableResult(structure_str_list, boxes);
return result;
}
@Override
public Batchifier getBatchifier() {
return null;
}
private NDArray ResizeTableImage(NDArray img, int height, int width, int maxLen) {
int localMax = Math.max(height, width);
float ratio = maxLen * 1.0f / localMax;
int resize_h = (int) (height * ratio);
int resize_w = (int) (width * ratio);
if(width > height){
xScale = 1.0f;
yScale = ratio;
} else{
xScale = ratio;
yScale = 1.0f;
}
img = NDImageUtils.resize(img, resize_w, resize_h);
return img;
}
private NDArray PaddingTableImage(TranslatorContext ctx, NDArray img, int maxLen) {
Image srcImg = ImageFactory.getInstance().fromNDArray(img.duplicate());
saveImage(srcImg, "img.png", "build/output");
NDArray paddingImg = ctx.getNDManager().zeros(new Shape(maxLen, maxLen, 3), DataType.UINT8);
// NDManager manager = NDManager.newBaseManager();
// NDArray paddingImg = manager.zeros(new Shape(maxLen, maxLen, 3), DataType.UINT8);
paddingImg.set(
new NDIndex("0:" + img.getShape().get(0) + ",0:" + img.getShape().get(1) + ",:"), img);
Image image = ImageFactory.getInstance().fromNDArray(paddingImg);
saveImage(image, "paddingImg.png", "build/output");
return paddingImg;
}
public void saveImage(Image img, String name, String path) {
Path outputDir = Paths.get(path);
Path imagePath = outputDir.resolve(name);
// OpenJDK cannot save the alpha channel of a jpg image, so write png instead
try {
img.save(Files.newOutputStream(imagePath), "png");
} catch (IOException e) {
e.printStackTrace();
}
}
}

@ -1279,7 +1279,7 @@ if (pMap!=null&&pMap.size()>0){
ArrayList<Integer> ints = new ArrayList<>();
for (int i = 0; i < num1; i++) {
PurchaseorderDTO model = dataList.get(i);
if (model.getPoundDate()!=null){
if (model.getPoundDate()!=null&&!model.getPoundDate().isEmpty()&&!model.getPoundDate().equals("null")){
// model.setCreatorTime(DateUtil.cstFormat(model.getCreatorTime()));
Date date = new SimpleDateFormat("yyyy.MM.dd").parse(model.getPoundDate());
@ -1431,7 +1431,7 @@ if (pMap!=null&&pMap.size()>0){
}
for (int i = 0; i < ints.size(); i++) {
Integer integer = ints.get(i);
dataList.remove(integer);
dataList.remove(dataList.get(integer));
}
BillRuleController bean = SpringContext.getBean(BillRuleController.class);
String str2="purchase";

@ -1,10 +1,11 @@
package jnpf.saleorder.controller;
import cn.afterturn.easypoi.excel.ExcelExportUtil;
import cn.afterturn.easypoi.excel.entity.ExportParams;
import cn.afterturn.easypoi.excel.entity.params.ExcelExportEntity;
import cn.afterturn.easypoi.excel.entity.enmus.ExcelType;
import cn.afterturn.easypoi.excel.entity.params.ExcelExportEntity;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
@ -15,10 +16,10 @@ import jnpf.arinvoices.service.ArinvoicesService;
import jnpf.arinvoices.service.Arinvoices_item0Service;
import jnpf.arinvoices.service.Arinvoices_item1Service;
import jnpf.base.ActionResult;
import jnpf.base.vo.PageListVO;
import jnpf.base.vo.PaginationVO;
import jnpf.base.UserInfo;
import jnpf.base.vo.DownloadVO;
import jnpf.base.vo.PageListVO;
import jnpf.base.vo.PaginationVO;
import jnpf.collection.entity.Collection_item0Entity;
import jnpf.collection.service.Collection_item0Service;
import jnpf.config.ConfigValueUtil;
@ -27,67 +28,34 @@ import jnpf.materialvo.entity.MaterialEntity;
import jnpf.materialvo.service.MaterialService;
import jnpf.poundlist.entity.PoundlistEntity;
import jnpf.poundlist.service.PoundlistService;
import jnpf.receiptout.entity.ReceiptoutEntity;
import jnpf.receiptout.entity.Receiptout_item0Entity;
import jnpf.receiptout.service.ReceiptoutService;
import jnpf.receiptout.service.Receiptout_item0Service;
import jnpf.reservoirarea.entity.ReservoirareaEntity;
import jnpf.reservoirarea.service.ReservoirareaService;
import jnpf.saleorder.mapper.ReceiptoutsoitemMapper;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.ibatis.annotations.Param;
import org.bytedeco.opencv.presets.opencv_core;
import org.springframework.security.core.parameters.P;
import org.springframework.transaction.annotation.Transactional;
import jnpf.base.entity.ProvinceEntity;
import jnpf.saleorder.model.saleorderitem.*;
import jnpf.saleorder.model.saleorderitem.SaleorderitemPagination;
import jnpf.saleorder.entity.*;
import jnpf.saleorder.entity.Salesorder_item0Entity;
import jnpf.saleorder.entity.ReceiptoutsoitemEntity;
import jnpf.saleorder.entity.Arinvoices_item0soitemEntity;
import jnpf.saleorder.entity.CollectionsoitemEntity;
import jnpf.saleorder.entity.SalesbacksoitemEntity;
import jnpf.saleorder.entity.PaymentsoitemEntity;
import jnpf.saleorder.model.saleorderitem.*;
import jnpf.saleorder.service.*;
import jnpf.util.*;
import jnpf.base.util.*;
import jnpf.base.vo.ListVO;
import jnpf.util.context.SpringContext;
import cn.hutool.core.util.ObjectUtil;
import lombok.extern.slf4j.Slf4j;
import jnpf.util.enums.FileTypeEnum;
import jnpf.util.file.UploadUtil;
import lombok.Cleanup;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.Workbook;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import jnpf.saleorder.entity.SaleorderitemEntity;
import jnpf.saleorder.service.SaleorderitemService;
import jnpf.saleorder.entity.Salesorder_item0Entity;
import jnpf.saleorder.service.Salesorder_item0Service;
import jnpf.saleorder.entity.ReceiptoutsoitemEntity;
import jnpf.saleorder.service.ReceiptoutsoitemService;
import jnpf.saleorder.entity.Arinvoices_item0soitemEntity;
import jnpf.saleorder.service.Arinvoices_item0soitemService;
import jnpf.saleorder.entity.CollectionsoitemEntity;
import jnpf.saleorder.service.CollectionsoitemService;
import jnpf.saleorder.entity.SalesbacksoitemEntity;
import jnpf.saleorder.service.SalesbacksoitemService;
import jnpf.saleorder.entity.PaymentsoitemEntity;
import jnpf.saleorder.service.PaymentsoitemService;
import org.springframework.web.bind.annotation.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.*;
import javax.validation.Valid;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import jnpf.util.GeneraterSwapUtil;
import java.math.BigDecimal;
import java.util.*;
import jnpf.util.file.UploadUtil;
import jnpf.util.enums.FileTypeEnum;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
*

@ -1,24 +1,11 @@
package jnpf.saleorder.service;
import jnpf.receiptout.entity.Receiptout_item0Entity;
import jnpf.saleorder.entity.Salesorder_item0Entity;
import jnpf.saleorder.service.Salesorder_item0Service;
import jnpf.saleorder.entity.ReceiptoutsoitemEntity;
import jnpf.saleorder.service.ReceiptoutsoitemService;
import jnpf.saleorder.entity.Arinvoices_item0soitemEntity;
import jnpf.saleorder.service.Arinvoices_item0soitemService;
import jnpf.saleorder.entity.CollectionsoitemEntity;
import jnpf.saleorder.service.CollectionsoitemService;
import jnpf.saleorder.entity.SalesbacksoitemEntity;
import jnpf.saleorder.service.SalesbacksoitemService;
import jnpf.saleorder.entity.PaymentsoitemEntity;
import jnpf.saleorder.service.PaymentsoitemService;
import jnpf.saleorder.entity.SaleorderitemEntity;
import com.baomidou.mybatisplus.extension.service.IService;
import jnpf.receiptout.entity.Receiptout_item0Entity;
import jnpf.saleorder.entity.*;
import jnpf.saleorder.model.saleorderitem.SaleorderitemPagination;
import org.bytedeco.opencv.presets.opencv_core;
import java.util.*;
import java.util.List;
/**
*
* saleorder

@ -1,25 +1,25 @@
package jnpf.tradeupload.controller;
import ai.djl.ModelException;
import ai.djl.translate.TranslateException;
import cn.afterturn.easypoi.excel.ExcelExportUtil;
import cn.afterturn.easypoi.excel.entity.ExportParams;
import cn.afterturn.easypoi.excel.entity.params.ExcelExportEntity;
import cn.afterturn.easypoi.excel.entity.enmus.ExcelType;
import cn.afterturn.easypoi.excel.entity.params.ExcelExportEntity;
import cn.hutool.core.util.ObjectUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.TypeReference;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import jnpf.base.ActionResult;
import jnpf.base.NoDataSourceBind;
import jnpf.base.vo.PageListVO;
import jnpf.base.vo.PaginationVO;
import jnpf.base.UserInfo;
import jnpf.base.util.OptimizeUtil;
import jnpf.base.vo.DownloadVO;
import jnpf.base.vo.PageListVO;
import jnpf.base.vo.PaginationVO;
import jnpf.config.ConfigValueUtil;
import jnpf.constant.MsgCode;
import jnpf.contractfile.entity.ContractFileEntity;
@ -28,51 +28,36 @@ import jnpf.customer.entity.CustomerEntity;
import jnpf.customer.service.CustomerService;
import jnpf.exception.DataException;
import jnpf.model.UploaderVO;
import jnpf.ocr_sdk.baiduUtils.VatInvoice;
import jnpf.ocr_sdk.controller.BaiduOcrController;
import jnpf.permission.entity.UserEntity;
import jnpf.tradeupload.entity.TradeuploadEntity;
import jnpf.tradeupload.model.tradeupload.*;
import jnpf.tradeupload.service.TradeuploadService;
import jnpf.util.*;
import jnpf.util.enums.FileTypeEnum;
import jnpf.util.file.UploadUtil;
import jnpf.utils.YozoUtils;
import jnpf.vehicle.entity.VehicleEntity;
import jnpf.vehicle.service.VehicleService;
import org.apache.commons.lang3.StringUtils;
import org.apache.poi.ss.formula.functions.T;
import org.bytedeco.opencv.presets.opencv_core;
import org.springframework.http.MediaType;
import org.springframework.transaction.annotation.Transactional;
import jnpf.base.entity.ProvinceEntity;
import jnpf.tradeupload.model.tradeupload.*;
import jnpf.tradeupload.model.tradeupload.TradeuploadPagination;
import jnpf.tradeupload.entity.*;
import jnpf.util.*;
import jnpf.base.util.*;
import jnpf.base.vo.ListVO;
import jnpf.util.context.SpringContext;
import cn.hutool.core.util.ObjectUtil;
import lombok.extern.slf4j.Slf4j;
import lombok.Cleanup;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.Workbook;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import jnpf.tradeupload.entity.TradeuploadEntity;
import jnpf.tradeupload.service.TradeuploadService;
import org.springframework.web.bind.annotation.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import javax.validation.Valid;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import jnpf.util.GeneraterSwapUtil;
import java.io.InputStream;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import jnpf.util.file.UploadUtil;
import jnpf.util.enums.FileTypeEnum;
import jnpf.ocr_sdk.baiduUtils.VatInvoice;
/**
*
* tradeupload
@ -424,7 +409,7 @@ public class TradeuploadController {
*/
@ApiOperation("磅单识别")
@PostMapping("/poundai")
public ActionResult poundAI(MultipartFile file ) throws ModelException, TranslateException, IOException {
public ActionResult poundAI(MultipartFile file ) throws IOException {
TradeuploadCrForm tradeuploadCrForm = new TradeuploadCrForm();
String result = VatInvoice.weightNote(file);
Map<String,Object> map = JsonUtil.stringToMap(result);
