Compare commits

..

74 Commits

Author SHA1 Message Date
6fe41c54cf fix: vendor Spire jar to avoid repo timeouts 2026-03-02 00:44:59 +08:00
8ad2fb7cd9 fix: use official e-iceblue repository 2026-03-02 00:26:13 +08:00
576a83f406 fix: drop flaky JAI repository 2026-03-02 00:13:17 +08:00
34e0b42c2f fix: stabilize Maven settings for Spire build 2026-03-02 00:03:33 +08:00
19d7f921be fix: default ACR username 2026-03-01 20:55:47 +08:00
82c71f26d4 fix: align ACR script with cabinet interface 2026-03-01 20:26:36 +08:00
8412ca739c fix: load ACR config from env file 2026-03-01 14:47:21 +08:00
6486d733b7 chore: ignore IDE metadata 2026-03-01 14:33:18 +08:00
92ebb5db20 chore: remove docs and helper scripts 2026-03-01 14:33:18 +08:00
7679e21024 chore: add ACR build/push script 2026-02-28 17:48:05 +08:00
98506927be test 2026-01-05 19:54:54 +08:00
775a7a3cc2 test 2025-11-28 15:59:44 +08:00
27ed834c09 test 2025-11-26 18:38:09 +08:00
6525e5ce80 test 2025-11-25 22:04:13 +08:00
34f508df7a fix 2025-11-25 17:44:34 +08:00
3b7371aca0 init 2025-11-24 17:19:23 +08:00
85953347db test 2025-11-24 16:14:12 +08:00
e604bacfa6 test 2025-11-24 15:01:03 +08:00
b75f05bac9 test 2025-11-24 12:21:46 +08:00
6912919692 test 2025-11-23 14:26:04 +08:00
1027622380 test 2025-11-22 23:17:17 +08:00
21cd636205 test 2025-11-22 22:58:12 +08:00
0d6b9866e1 test 2025-11-22 22:11:33 +08:00
f552f3a1cf test 2025-11-22 21:03:27 +08:00
18fe6cbfc5 test 2025-11-22 20:10:28 +08:00
c000932105 test 2025-11-22 20:05:53 +08:00
248d91729f test 2025-11-22 14:22:36 +08:00
a0933afc97 test 2025-11-18 18:19:37 +08:00
7b76a4d2f7 test dockerfile 2025-11-18 08:51:29 +08:00
dda1a4935a test 2025-11-11 18:18:35 +08:00
981b2fefba test 2025-11-11 18:06:09 +08:00
4d3baa1920 test 2025-11-11 17:58:35 +08:00
a8595c7403 test 2025-11-11 17:45:09 +08:00
c5e422f058 test 2025-11-07 18:08:54 +08:00
6d6e9ab8c8 test 2025-11-03 15:09:53 +08:00
3401c943ce test 2025-11-01 19:39:31 +08:00
a192e5f434 test 2025-11-01 19:31:37 +08:00
cf8d4f1735 test 2025-11-01 19:28:08 +08:00
6b4908f52b test 2025-11-01 19:24:35 +08:00
cc430a2d9d test 2025-11-01 19:07:38 +08:00
4d95b15df5 test 2025-11-01 19:03:23 +08:00
5d6f20b003 test 2025-11-01 18:34:41 +08:00
99b5c6177a test 2025-11-01 18:25:04 +08:00
c7c314b851 test 2025-11-01 18:00:12 +08:00
d21ff35d17 test 2025-11-01 17:37:42 +08:00
bd0fc6a284 test 2025-11-01 17:19:40 +08:00
aa7f4fb686 test 2025-11-01 17:13:29 +08:00
41a4c28f39 test 2025-11-01 17:06:47 +08:00
ce40cafad2 test 2025-11-01 17:02:16 +08:00
287571b421 test 2025-11-01 16:57:54 +08:00
e8eb65b01d test 2025-11-01 16:56:39 +08:00
ccf7ed7411 test 2025-11-01 16:52:19 +08:00
5ed4fbca92 test 2025-11-01 16:44:44 +08:00
3c5df92c06 test 2025-11-01 16:42:15 +08:00
da355d8bd3 test 2025-11-01 16:35:44 +08:00
e4cb0bb76e test 2025-11-01 16:23:50 +08:00
1117815a9a test 2025-11-01 16:18:57 +08:00
a5572a157e test 2025-11-01 16:07:12 +08:00
c347b12ea7 test 2025-11-01 15:50:06 +08:00
b30a5b6779 test 2025-11-01 15:44:47 +08:00
46fe0a6d1a test 2025-11-01 15:38:04 +08:00
20e8d3b342 test 2025-11-01 15:36:47 +08:00
b2dc9708eb test 2025-11-01 15:26:17 +08:00
fb590926e7 test 2025-11-01 15:24:04 +08:00
68d434e3c4 test 2025-11-01 14:57:14 +08:00
a1e2bea798 test 2025-11-01 14:23:30 +08:00
5beb48e11c test: es 2025-10-31 17:54:57 +08:00
7ea83200d2 test: sync contract 2025-10-31 16:15:47 +08:00
1a615c9374 todog 2025-10-28 16:26:20 +08:00
97135dfb25 todo 2025-10-27 17:41:25 +08:00
135cc18c4e todo 2025-10-27 13:47:03 +08:00
8e5774955d fix: bug 2025-10-27 09:28:10 +08:00
17bd458302 fix: bug 2025-10-24 10:20:31 +08:00
4ba880ea40 test 2025-10-23 07:54:14 +08:00
276 changed files with 10386 additions and 339955 deletions

81
.dockerignore Normal file
View File

@@ -0,0 +1,81 @@
# Maven build output (keep pom.xml/settings.xml; only intermediate dirs excluded)
target/classes/
target/test-classes/
target/maven-archiver/
target/maven-status/
# settings.xml must be COPY'd into the build container, so it is NOT ignored
# settings.xml
# IDE files
.idea/
.vscode/
*.iml
*.ipr
*.iws
# Log files
logs/
*.log
# Temporary files
temp/
*.tmp
*.temp
# OS files
.DS_Store
Thumbs.db
# Git files
.git/
.gitignore
# Docker files
Dockerfile*
docker-compose*.yml
.dockerignore
# Documentation
README.md
*.md
docs/
# Tests
src/test/
# Config backups (may contain sensitive values)
application.properties.backup
*.properties.backup
# Node modules
node_modules/
npm-debug.log*
# Python artifacts
__pycache__/
*.py[cod]
*$py.class
# Backup files
*.bak
*.backup
*.swp
*.swo
# Environment files
.env
.env.local
.env.*.local
# Local data directories
data/
upload/
# NOTE(review): temp/ is already listed above — duplicate entry, harmless
temp/
unzip/
images/
reports/
# Maven wrapper
.mvn/wrapper/maven-wrapper.jar
.mvn/wrapper/maven-wrapper.properties

6
.gitignore vendored
View File

@@ -10,3 +10,9 @@
/out/
*.iml
/target
# Eclipse / STS
/.classpath
/.project
/.factorypath
/.settings/

215
Dockerfile Normal file
View File

@@ -0,0 +1,215 @@
# ===== Base image stage =====
# Small Alpine JRE image shared by the builder and the runtime stage.
FROM docker.aipper.de/eclipse-temurin:8-jre-alpine AS base
# Maintainer metadata (LABEL form; the MAINTAINER instruction is deprecated)
LABEL maintainer="digital-archive-team"
# Switch apk to the Aliyun mirror, then install base tools plus font and OCR
# support in a single layer; the apk cache is removed in the same layer.
RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && \
apk add --no-cache \
ca-certificates \
curl \
bash \
# base font package
ttf-dejavu \
# additional font packages for wider glyph coverage
ttf-freefont \
ttf-liberation \
ttf-inconsolata \
# font configuration tooling
fontconfig \
tini \
# OCR packages (tesseract engine + simplified-Chinese and English data)
tesseract-ocr \
tesseract-ocr-data-chi_sim \
tesseract-ocr-data-eng \
&& rm -rf /var/cache/apk/*
# Best-effort install of extra bitmap font packages; the trailing `|| echo`
# deliberately keeps the build going when a package is missing from the mirror.
RUN echo "Installing additional Chinese fonts..." && \
apk add --no-cache \
font-adobe-100dpi \
font-adobe-75dpi \
font-alias \
font-util \
|| echo "Some font packages not available, continuing with default fonts"
# NOTE(review): a previous step tested /build/src/main/resources/SIMYOU.TTF
# here, but /build is only populated in the *builder* stage, so the check could
# never succeed in this stage and has been removed as dead code. If SIMYOU.TTF
# must ship in the runtime image, COPY it explicitly in the final stage.
# Regenerate the font cache so the installed fonts are registered.
RUN fc-cache -fv || echo "Font cache generation completed with warnings"
# ===== Maven build stage =====
# Build with the smaller Alpine Maven image.
FROM docker.aipper.de/maven:3.9.9-eclipse-temurin-8-alpine AS builder
# Working directory for the whole build
WORKDIR /build
# Create the Maven local repository and normalize ownership
RUN mkdir -p /root/.m2/repository && \
chown -R root:root /root/.m2
# Copy settings.xml and pom.xml first so the dependency layers cache well
COPY settings.xml /root/.m2/
COPY pom.xml .
# Copy vendored JARs (includes twain4java) staged for manual installation
COPY src/main/lib/ /tmp/local-jars/
# Manually install the vendored JARs into the local Maven repository.
# -gs and -s both point at the same file so global and user settings agree.
RUN mvn install:install-file \
-Dfile=/tmp/local-jars/aspose-cells-8.5.2.jar \
-DgroupId=com.aspose \
-DartifactId=aspose-cells \
-Dversion=8.5.2 \
-Dpackaging=jar \
-B -gs /root/.m2/settings.xml -s /root/.m2/settings.xml
RUN mvn install:install-file \
-Dfile=/tmp/local-jars/aspose-words-15.8.0-jdk16.jar \
-DgroupId=com.aspose \
-DartifactId=aspose-words \
-Dversion=15.8.0 \
-Dpackaging=jar \
-B -gs /root/.m2/settings.xml -s /root/.m2/settings.xml
RUN mvn install:install-file \
-Dfile=/tmp/local-jars/jai_codec-1.1.3.jar \
-DgroupId=javax.media \
-DartifactId=jai_codec \
-Dversion=1.1.3 \
-Dpackaging=jar \
-B -gs /root/.m2/settings.xml -s /root/.m2/settings.xml
RUN mvn install:install-file \
-Dfile=/tmp/local-jars/jai_core.jar \
-DgroupId=javax.media \
-DartifactId=jai_core \
-Dversion=1.0.0-SNAPSHOT \
-Dpackaging=jar \
-B -gs /root/.m2/settings.xml -s /root/.m2/settings.xml
RUN mvn install:install-file \
-Dfile=/tmp/local-jars/jce-0.0.1.jar \
-DgroupId=org.bouncycastle \
-DartifactId=jce \
-Dversion=0.0.1 \
-Dpackaging=jar \
-B -gs /root/.m2/settings.xml -s /root/.m2/settings.xml
# NOTE(review): the file is agent-1.0.0.jar but it is installed as version
# 1.0.1 — presumably to match the pom's declared coordinate; verify intended.
RUN mvn install:install-file \
-Dfile=/tmp/local-jars/agent-1.0.0.jar \
-DgroupId=com.yh \
-DartifactId=scofd \
-Dversion=1.0.1 \
-Dpackaging=jar \
-B -gs /root/.m2/settings.xml -s /root/.m2/settings.xml
# Install the twain4java JAR (scanner support)
RUN mvn install:install-file \
-Dfile=/tmp/local-jars/twain4java-0.3.3-all.jar \
-DgroupId=twain4java \
-DartifactId=twain4java \
-Dversion=0.3.3-all \
-Dpackaging=jar \
-B -gs /root/.m2/settings.xml -s /root/.m2/settings.xml
# Ensure the local repository is readable for the rest of the build
RUN chown -R root:root /root/.m2 && \
chmod -R 755 /root/.m2
# Copy the application sources (after the dependency layers, for cache reuse)
COPY src ./src
# Verify the toolchain and pre-fetch dependencies without compiling.
# The `||` fallback to dependency:resolve deliberately tolerates go-offline
# failures for system-scoped/vendored artifacts.
RUN echo "=== 验证Maven环境 ===" && \
mvn -version && \
echo "=== 验证本地JAR文件 ===" && \
ls -la /tmp/local-jars/ && \
echo "=== 下载依赖(不编译) ===" && \
mvn dependency:go-offline -B -gs /root/.m2/settings.xml -s /root/.m2/settings.xml -e || \
mvn dependency:resolve -B -gs /root/.m2/settings.xml -s /root/.m2/settings.xml -e
# Build the application (online so missing deps can still be downloaded), then
# delete the Maven repository in the same layer to keep the stage small.
RUN echo "=== 开始构建应用 ===" && \
mvn clean package -DskipTests -B -gs /root/.m2/settings.xml -s /root/.m2/settings.xml -e \
-Dmaven.test.skip=true \
-Dmaven.compiler.optimize=true \
&& \
echo "=== 清理Maven缓存减少镜像大小 ===" && \
rm -rf /root/.m2/repository
# Inspect the build output, stage the fat jar as /build/app.jar, then remove
# sources and temporaries in the same layer.
# NOTE(review): `cp /build/target/*.jar` assumes exactly one jar in target/;
# confirm the build produces a single artifact.
RUN echo "=== 检查构建结果 ===" && \
ls -la /build/target/ && \
echo "=== 查找所有 JAR 文件 ===" && \
find /build/target -name "*.jar" -type f && \
echo "=== 复制正确的 JAR 文件 ===" && \
cp /build/target/*.jar /build/app.jar && \
ls -la /build/app.jar && \
echo "JAR 文件大小: $(du -h /build/app.jar | cut -f1)" && \
echo "=== 清理构建临时文件 ===" && \
rm -rf /build/src /build/pom.xml /build/target /tmp/local-jars
# ===== Runtime stage =====
# Reuse the base image so fonts/OCR packages are not installed a second time.
FROM base
# JVM defaults: heap sizing, G1 GC, OOM heap dumps, GC logging, headless AWT,
# container-aware memory limits, UTF-8 and the Asia/Shanghai timezone.
# NOTE(review): both -Xmx2g and -XX:MaxRAMPercentage=80.0 are set; on JDK 8 an
# explicit -Xmx takes precedence — confirm which limit is intended.
ENV JAVA_OPTS="-Xms1g -Xmx2g -XX:+UseG1GC -XX:MaxGCPauseMillis=200 -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/app/dumps/ -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -Xloggc:/app/logs/gc.log -Djava.awt.headless=true -XX:+UseContainerSupport -XX:MaxRAMPercentage=80.0 -XX:InitiatingHeapOccupancyPercent=45 -XX:+UseStringDeduplication -Dfile.encoding=UTF-8 -Duser.timezone=Asia/Shanghai"
ENV SPRING_PROFILES_ACTIVE=prod
ENV TESSDATA_PREFIX=/usr/share/tessdata/
ENV OCR_TESSPATH=/usr/bin/tesseract
# Font-related environment variables
ENV JAVA_FONTS=/usr/share/fonts
ENV FONTCONFIG_PATH=/etc/fonts
# Create an unprivileged user and the writable application directories.
# /app/dumps is created here because -XX:HeapDumpPath points at it; previously
# it only existed when docker-compose mounted a volume there, so heap dumps
# failed in a plain `docker run`.
RUN addgroup -g 1001 app && \
adduser -D -s /bin/sh -u 1001 -G app app && \
mkdir -p /app/data/upload \
/app/data/temp \
/app/data/unzip \
/app/data/images \
/app/data/reports \
/app/data/elasticsearch \
/app/dumps \
/app/logs && \
chown -R app:app /app
# Application working directory
WORKDIR /app
# Copy the fat jar produced by the builder stage
COPY --from=builder /build/app.jar app.jar
# Sanity-check that the jar was copied and report its size
RUN ls -la app.jar && echo "JAR 文件大小: $(du -h app.jar | cut -f1)"
# Drop privileges for the application process
USER app
# Liveness probe against the Spring Boot actuator endpoint
HEALTHCHECK --interval=30s --timeout=10s --start-period=60s --retries=3 \
CMD curl -f http://localhost:9081/point-strategy/actuator/health || exit 1
# Documented listening port (EXPOSE does not publish it)
EXPOSE 9081
# tini runs as PID 1 and forwards signals; `exec` makes java replace the
# intermediate shell so SIGTERM from `docker stop` reaches the JVM directly.
ENTRYPOINT ["/sbin/tini", "--"]
CMD ["sh", "-c", "exec java $JAVA_OPTS -jar app.jar"]

27
alpine-repositories.conf Normal file
View File

@@ -0,0 +1,27 @@
# Alpine Linux 国内镜像源配置
# 主要使用阿里云镜像源,提供更快的下载速度
# 阿里云镜像源 (主要)
https://mirrors.aliyun.com/alpine/v3.18/main/
https://mirrors.aliyun.com/alpine/v3.18/community/
# 备用镜像源
# 华为云镜像源
# https://repo.huaweicloud.com/alpine/v3.18/main/
# https://repo.huaweicloud.com/alpine/v3.18/community/
# 腾讯云镜像源
# https://mirrors.cloud.tencent.com/alpine/v3.18/main/
# https://mirrors.cloud.tencent.com/alpine/v3.18/community/
# 网易云镜像源
# https://mirrors.163.com/alpine/v3.18/main/
# https://mirrors.163.com/alpine/v3.18/community/
# 清华大学镜像源
# https://mirrors.tuna.tsinghua.edu.cn/alpine/v3.18/main/
# https://mirrors.tuna.tsinghua.edu.cn/alpine/v3.18/community/
# 中科大镜像源
# https://mirrors.ustc.edu.cn/alpine/v3.18/main/
# https://mirrors.ustc.edu.cn/alpine/v3.18/community/

125
build-push-acr.sh Executable file
View File

@@ -0,0 +1,125 @@
#!/usr/bin/env bash
set -euo pipefail
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Load KEY=VALUE pairs from an env file into the current environment.
# Behavior: missing file -> silent no-op; blank lines, comment lines and lines
# without '=' are skipped; one pair of surrounding single or double quotes is
# stripped from the value; variables already set are NOT overridden.
# NOTE(review): a variable exported as an *empty string* is treated as unset
# here and will be overwritten — confirm that is acceptable.
load_env_file() {
local file="$1"
[[ -f "$file" ]] || return 0
# `|| [[ -n "$line" ]]` also processes a final line with no trailing newline
while IFS= read -r line || [[ -n "$line" ]]; do
# trim leading, then trailing, whitespace using pure parameter expansion
line="${line#${line%%[![:space:]]*}}"
line="${line%${line##*[![:space:]]}}"
[[ -z "$line" ]] && continue
[[ "$line" == \#* ]] && continue
[[ "$line" != *=* ]] && continue
local key="${line%%=*}"
local val="${line#*=}"
# trim whitespace around the key as well
key="${key#${key%%[![:space:]]*}}"
key="${key%${key##*[![:space:]]}}"
# accept only valid shell identifier names
[[ "$key" =~ ^[A-Za-z_][A-Za-z0-9_]*$ ]] || continue
# strip one matching pair of surrounding double or single quotes
if [[ "$val" =~ ^\".*\"$ ]]; then
val="${val:1:${#val}-2}"
elif [[ "$val" =~ ^\'.*\'$ ]]; then
val="${val:1:${#val}-2}"
fi
# first definition wins: never clobber a variable already in the environment
if [[ -z "${!key:-}" ]]; then
export "$key=$val"
fi
done < "$file"
}
# Print usage help to stderr. The heredoc delimiter is quoted ('EOF'), so no
# parameter expansion happens inside the help text.
usage() {
cat <<'EOF' >&2
Usage:
bash build-push-acr.sh <acr_password>
# or:
ACR_PASSWORD=... ACR_USERNAME=... REPO_URL=... NAMESPACE=... REPO_NAME=... IMAGE_TAG=... bash build-push-acr.sh
Required:
acr_password (positional arg #1) OR env ACR_PASSWORD
Optional:
ENV_FILE env file to load (default: ../deploy/.env if exists)
ACR_USERNAME default: aipper@qq.com
REPO_URL registry host (ACR). Default: registry.cn-hangzhou.aliyuncs.com
NAMESPACE ACR namespace. Default: aipper
REPO_NAME repository name. Default: digital-archive-server
IMAGE_TAG default: YYYYMMDDHHMM
DRY_RUN=1 print computed image ref and exit
Compatibility:
REPO_URL -> ACR_REGISTRY
NAMESPACE -> ACR_NAMESPACE
EOF
}
# Load configuration: an explicit ENV_FILE wins, otherwise fall back to the
# sibling deploy/.env (load_env_file never overrides already-set variables).
if [[ -n "${ENV_FILE:-}" ]]; then
load_env_file "$ENV_FILE"
else
load_env_file "$script_dir/../deploy/.env"
fi
# Positional args: $1 = password, $2 = username; env vars take precedence.
if [[ -z "${ACR_PASSWORD:-}" && -n "${1:-}" ]]; then
export ACR_PASSWORD="$1"
fi
if [[ -z "${ACR_USERNAME:-}" && -n "${2:-}" ]]; then
export ACR_USERNAME="$2"
fi
# Compatibility mapping: legacy REPO_URL/NAMESPACE feed ACR_REGISTRY/ACR_NAMESPACE.
if [[ -z "${ACR_REGISTRY:-}" && -n "${REPO_URL:-}" ]]; then
export ACR_REGISTRY="$REPO_URL"
fi
if [[ -z "${ACR_NAMESPACE:-}" && -n "${NAMESPACE:-}" ]]; then
export ACR_NAMESPACE="$NAMESPACE"
fi
# Resolve defaults; both naming schemes end up populated and consistent.
export REPO_URL="${REPO_URL:-${ACR_REGISTRY:-registry.cn-hangzhou.aliyuncs.com}}"
export NAMESPACE="${NAMESPACE:-${ACR_NAMESPACE:-aipper}}"
export REPO_NAME="${REPO_NAME:-${IMAGE_REPO:-digital-archive-server}}"
export IMAGE_TAG="${IMAGE_TAG:-$(date +"%Y%m%d%H%M")}"
export ACR_REGISTRY="${ACR_REGISTRY:-$REPO_URL}"
export ACR_NAMESPACE="${ACR_NAMESPACE:-$NAMESPACE}"
export IMAGE_REPO="${IMAGE_REPO:-$REPO_NAME}"
export ACR_USERNAME="${ACR_USERNAME:-aipper@qq.com}"
# The password is the only value with no default; fail fast without it.
if [[ -z "${ACR_PASSWORD:-}" ]]; then
echo "错误请在运行脚本时传递密码例如bash build-push-acr.sh your-acr-password" >&2
usage
exit 1
fi
# Compute the fully-qualified image reference.
image_repo="$IMAGE_REPO"
image_tag="$IMAGE_TAG"
image_ref="${ACR_REGISTRY}/${ACR_NAMESPACE}/${image_repo}:${image_tag}"
# DRY_RUN=1: print the computed reference and exit without building.
if [[ "${DRY_RUN:-}" == "1" ]]; then
echo "DRY_RUN=1"
echo "IMAGE_REF=$image_ref"
exit 0
fi
# Login via stdin so the password never appears in the process list.
printf '%s' "$ACR_PASSWORD" | docker login "$ACR_REGISTRY" -u "$ACR_USERNAME" --password-stdin
# Prefer buildx when available (--load keeps the image in the local daemon).
if docker buildx version >/dev/null 2>&1; then
docker buildx build \
-f "$script_dir/Dockerfile" \
-t "$image_ref" \
--load \
"$script_dir"
else
docker build \
-f "$script_dir/Dockerfile" \
-t "$image_ref" \
"$script_dir"
fi
docker push "$image_ref"
echo "Pushed: $image_ref"
echo "SERVER_IMAGE=$image_ref"

0
dep_tree.txt Normal file
View File

56
docker-compose.simple.yml Normal file
View File

@@ -0,0 +1,56 @@
# NOTE: the obsolete top-level `version` key has been removed; Compose v2
# ignores it with a warning, and the sibling docker-compose.yml already omits it.
services:
  # Main application service
  app:
    image: digital-archive:latest
    container_name: digital-archive-app
    ports:
      - "9081:9081"
    volumes:
      - ./data/upload:/app/data/upload
      - ./data/temp:/app/data/temp
      - ./data/unzip:/app/data/unzip
      - ./data/images:/app/data/images
      - ./data/reports:/app/data/reports
      - ./logs:/app/logs
    environment:
      - SPRING_PROFILES_ACTIVE=prod
      - SERVER_PORT=9081
      # MySQL configuration
      # NOTE(review): plaintext credentials are committed here — prefer an
      # env_file or a secrets mechanism
      - DB_HOST=mysql
      - DB_PORT=3306
      - DB_NAME=enterprise_digital_archives
      - DB_USERNAME=root
      - DB_PASSWORD=Abc@123456
      - DB_DRIVER=com.mysql.cj.jdbc.Driver
      # Redis configuration
      - REDIS_HOST=redis
      - REDIS_PORT=6379
      - REDIS_PASSWORD=Abc123456
      # Elasticsearch configuration - reuses the existing "es" container
      - ELASTICSEARCH_HOST=es
      - ELASTICSEARCH_PORT=9200
      - ELASTICSEARCH_SCHEME=http
      # OCR configuration
      - TESS_PATH=/usr/bin/tesseract
      # Misc
      - SWAGGER_SHOW=false
      - LOG_ROOT_LEVEL=info
      - LOG_APP_LEVEL=info
    networks:
      - proxy
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:9081/point-strategy/actuator/health"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 60s
# Elasticsearch is provided by the pre-existing "es" container.
# Note: make sure that container is attached to the external `proxy` network.
networks:
  proxy:
    external: true
58
docker-compose.yml Normal file
View File

@@ -0,0 +1,58 @@
services:
app:
image: archive
container_name: digital-archive-app
ports:
- "9081:9081"
volumes:
# Persistent data directories, mounted read-write
- ./data/upload:/app/data/upload:rw
- ./data/temp:/app/data/temp:rw
- ./data/unzip:/app/data/unzip:rw
- ./data/images:/app/data/images:rw
- ./data/reports:/app/data/reports:rw
- ./logs:/app/logs:rw
# Heap-dump directory matching -XX:HeapDumpPath in JAVA_OPTS
- ./data/dumps:/app/dumps:rw
user: "1001:1001" # matches the `app` user in the Dockerfile; NOTE(review): the host directories above must be owned/writable by uid 1001 or the app cannot write to them
environment:
- SPRING_PROFILES_ACTIVE=prod
- SERVER_PORT=9081
- DB_HOST=database
- DB_PORT=54321
- DB_NAME=enterprise_digital_archives
- DB_USERNAME=system
- DB_PASSWORD=12345678ab
- REDIS_HOST=redis
- REDIS_PORT=6379
- REDIS_PASSWORD=Abc123456
- ELASTICSEARCH_HOST=es
- ELASTICSEARCH_PORT=9200
- ELASTICSEARCH_SCHEME=http
- TESS_PATH=/usr/bin/tesseract
- SWAGGER_SHOW=false
- LOG_ROOT_LEVEL=info
- LOG_APP_LEVEL=info
# JVM memory tuning (overrides the Dockerfile's default JAVA_OPTS)
- JAVA_OPTS=-Xms1g -Xmx2g -XX:+UseG1GC -XX:MaxGCPauseMillis=200 -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/app/dumps/ -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -Xloggc:/app/logs/gc.log -XX:+UseContainerSupport -XX:MaxRAMPercentage=80.0 -XX:InitiatingHeapOccupancyPercent=45
networks:
- proxy
restart: unless-stopped
# Health check and resource limits
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9081/point-strategy/actuator/health"]
interval: 30s
timeout: 10s
retries: 3
start_period: 60s
deploy:
resources:
limits:
memory: 3G # container memory ceiling
cpus: '2.0' # container CPU ceiling
reservations:
memory: 1G # reserved memory
cpus: '1.0' # reserved CPU
networks:
proxy:
external: true

View File

@@ -0,0 +1,61 @@
# JVM内存优化配置文件
# 用于防止OOM问题的JVM参数配置
# 基础内存设置
-Xms1g # 初始堆内存大小
-Xmx2g # 最大堆内存大小
-XX:NewRatio=1 # 年轻代与老年代比例
-XX:SurvivorRatio=8 # Eden与Survivor区比例
# 垃圾收集器优化
-XX:+UseG1GC # 使用G1垃圾收集器
-XX:MaxGCPauseMillis=200 # 最大GC暂停时间目标
-XX:G1HeapRegionSize=16m # G1区域大小
-XX:InitiatingHeapOccupancyPercent=45 # 触发并发GC的堆占用率
# OOM预防
-XX:+HeapDumpOnOutOfMemoryError # OOM时生成堆转储
-XX:HeapDumpPath=/app/dumps/ # 堆转储文件路径
-XX:+UseGCLogFileRotation # GC日志轮转
-XX:NumberOfGCLogFiles=5 # 保留GC日志文件数量
-XX:GCLogFileSize=10M # 单个GC日志文件大小
# 容器环境优化
-XX:+UseContainerSupport # 启用容器支持
-XX:MaxRAMPercentage=80.0 # 最大使用容器80%内存
-XX:+UnlockExperimentalVMOptions # 解锁实验性VM选项
-XX:+UseCGroupMemoryLimitForHeap       # NOTE(review): obsolete on JDK 8u191+, superseded by +UseContainerSupport set above — remove on newer JVMs
# 字符串优化
-XX:+UseStringDeduplication # 启用字符串去重
-XX:StringTableSize=200000 # 字符串表大小
# 类加载优化
-XX:+UseCompressedOops # 压缩对象指针
-XX:+UseCompressedClassPointers # 压缩类指针
# 监控和日志
-XX:+PrintGCDetails # 打印GC详细信息
-XX:+PrintGCTimeStamps # 打印GC时间戳
-XX:+PrintGCApplicationStoppedTime # 打印GC暂停时间
-Xloggc:/app/logs/gc.log # GC日志文件路径
# 网络和IO优化
-Djava.awt.headless=true # 无头模式
-Dfile.encoding=UTF-8 # 文件编码
-Duser.timezone=Asia/Shanghai # 时区设置
# Spring Boot特定优化
-Dspring.jmx.enabled=false # 禁用JMX
-Dspring.output.ansi.enabled=never # 禁用ANSI颜色
-XX:+TieredCompilation # 分层编译
-XX:TieredStopAtLevel=1 # 快速编译
# 异常处理
-XX:+OmitStackTraceInFastThrow # 快速抛出异常时省略堆栈
-XX:+AlwaysPreTouch # 预分配内存页
# 元空间优化
-XX:MetaspaceSize=256m # 初始元空间大小
-XX:MaxMetaspaceSize=512m # 最大元空间大小
-XX:+UseCompressedOops                 # NOTE(review): duplicate of the same flag in the "string/class-loading" section above (and the comment there said "compressed class pointers" by mistake); harmless but should be deduplicated

187
pom.xml
View File

@@ -11,7 +11,7 @@
<groupId>com.point.strategy</groupId>
<artifactId>point-strategy</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>war</packaging>
<packaging>jar</packaging>
<name>point-strategy</name>
<description>Demo project for Spring Boot</description>
@@ -22,6 +22,12 @@
<repositories>
<repository>
<id>com.e-iceblue</id>
<name>e-iceblue</name>
<url>https://repo.e-iceblue.com/nexus/content/groups/public/</url>
</repository>
<repository>
<id>com.e-iceblue-cn</id>
<name>e-iceblue (CN mirror)</name>
<url>https://repo.e-iceblue.cn/repository/maven-public/</url>
</repository>
</repositories>
@@ -30,17 +36,6 @@
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
<exclusions>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-tomcat</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-tomcat</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
@@ -69,7 +64,7 @@
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.6</version>
<version>8.0.33</version>
</dependency>
<!-- 分页插件 -->
<dependency>
@@ -185,13 +180,13 @@
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.16</version>
<version>1.18.30</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>dm</groupId>-->
<!-- <artifactId>dm.jdbc.driver</artifactId>-->
<!-- <version>1.6</version>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>dm</groupId>-->
<!-- <artifactId>dm.jdbc.driver</artifactId>-->
<!-- <version>1.6</version>-->
<!-- </dependency>-->
<!-- CXF webservice -->
<dependency>
@@ -200,6 +195,23 @@
<version>3.2.6</version>
</dependency>
<!-- CXF webservice -->
<!-- Java 11+ JAX-WS API dependencies -->
<dependency>
<groupId>javax.xml.ws</groupId>
<artifactId>jaxws-api</artifactId>
<version>2.3.1</version>
</dependency>
<dependency>
<groupId>javax.jws</groupId>
<artifactId>jsr181-api</artifactId>
<version>1.0-MR1</version>
</dependency>
<dependency>
<groupId>javax.activation</groupId>
<artifactId>activation</artifactId>
<version>1.1.1</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
@@ -240,7 +252,7 @@
<dependency>
<groupId>org.apache.pdfbox</groupId>
<artifactId>pdfbox</artifactId>
<version>2.0.3</version>
<version>2.0.27</version>
</dependency>
<dependency>
@@ -255,12 +267,6 @@
<artifactId>spring-boot-starter-data-elasticsearch</artifactId>
</dependency>
<dependency>
<groupId>net.sourceforge.jexcelapi</groupId>
<artifactId>jxl</artifactId>
<version>2.6.12</version>
</dependency>
<dependency>
<groupId>net.sourceforge.tess4j</groupId>
<artifactId>tess4j</artifactId>
@@ -275,13 +281,13 @@
<systemPath>${basedir}/src/main/lib/twain4java-0.3.3-all.jar</systemPath>
</dependency>
<!-- <dependency>-->
<!-- <groupId>rt</groupId>-->
<!-- <artifactId>rt</artifactId>-->
<!-- <version>0.0.1</version>-->
<!-- <scope>system</scope>-->
<!-- <systemPath>${basedir}/src/main/lib/rt-0.0.1.jar</systemPath>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>rt</groupId>-->
<!-- <artifactId>rt</artifactId>-->
<!-- <version>0.0.1</version>-->
<!-- <scope>system</scope>-->
<!-- <systemPath>${basedir}/src/main/lib/rt-0.0.1.jar</systemPath>-->
<!-- </dependency>-->
<dependency>
<groupId>jce</groupId>
@@ -318,7 +324,7 @@
<artifactId>spring-boot-starter-thymeleaf</artifactId>
</dependency>
<!--视频转码依赖-->
<!--视频转码依赖 - 占体积最大400-500MB -->
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv</artifactId>
@@ -342,10 +348,11 @@
<groupId>e-iceblue</groupId>
<artifactId>spire.pdf.free</artifactId>
<version>5.1.0</version>
<scope>system</scope>
<systemPath>${basedir}/src/main/lib/spire.pdf.free-5.1.0.jar</systemPath>
</dependency>
<dependency>
<groupId>com.aspose</groupId>
<artifactId>aspose-words</artifactId>
@@ -363,19 +370,13 @@
</dependency>
<!-- excel转pdf -->
<!-- <dependency>-->
<!-- <groupId>com.aspose</groupId>-->
<!-- <artifactId>aspose-cells</artifactId>-->
<!-- <version>24.7</version>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>com.aspose</groupId>-->
<!-- <artifactId>aspose-cells</artifactId>-->
<!-- <version>24.7</version>-->
<!-- </dependency>-->
<!-- 图片转pdf 判断方向 -->
<dependency>
<groupId>com.drewnoakes</groupId>
<artifactId>metadata-extractor</artifactId>
<version>2.16.0</version> <!-- 请确保使用最新版本 -->
</dependency>
<!-- 获取mp3元数据 -->
<dependency>
<groupId>org</groupId>
@@ -397,14 +398,13 @@
</dependency>
<!-- &lt;!&ndash; 单点登录&ndash;&gt;-->
<!-- <dependency>-->
<!-- <groupId>org.keycloak</groupId>-->
<!-- <artifactId>keycloak-spring-boot-starter</artifactId>-->
<!-- <version>10.0.2</version>-->
<!-- <scope>provided</scope>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>org.keycloak</groupId>-->
<!-- <artifactId>keycloak-spring-boot-starter</artifactId>-->
<!-- <version>10.0.2</version>-->
<!-- <scope>provided</scope>-->
<!-- </dependency>-->
<dependency>
<groupId>org.springframework</groupId>
@@ -412,12 +412,12 @@
<version>4.3.6.RELEASE</version>
</dependency>
<!-- sqlServer 驱动 -->
<!-- <dependency>-->
<!-- <groupId>com.microsoft.sqlserver</groupId>-->
<!-- <artifactId>sqljdbc4</artifactId>-->
<!-- <version>4.0</version>-->
<!-- </dependency>-->
<!-- sqlServer 驱动 -->
<!-- <dependency>-->
<!-- <groupId>com.microsoft.sqlserver</groupId>-->
<!-- <artifactId>sqljdbc4</artifactId>-->
<!-- <version>4.0</version>-->
<!-- </dependency>-->
<!--aop-->
<dependency>
@@ -432,13 +432,13 @@
<!-- 人大金仓 JDBC 驱动 -->
<!-- <dependency>-->
<!-- <groupId>com.kingbase8</groupId>-->
<!-- <artifactId>kingbase8</artifactId>-->
<!-- <version>8.6.0</version>-->
<!-- <scope>system</scope>-->
<!-- <systemPath>${basedir}/src/main/lib/kingbase8-8.6.0.jar</systemPath>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>com.kingbase8</groupId>-->
<!-- <artifactId>kingbase8</artifactId>-->
<!-- <version>8.6.0</version>-->
<!-- <scope>system</scope>-->
<!-- <systemPath>${basedir}/src/main/lib/kingbase8-8.6.0.jar</systemPath>-->
<!-- </dependency>-->
<dependency>
<groupId>com.highgo</groupId>
@@ -505,11 +505,11 @@
<version>2.16.0</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>org.apache.logging.log4j</groupId>-->
<!-- <artifactId>log4j-slf4j-impl</artifactId>-->
<!-- <version>2.16.0</version>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>org.apache.logging.log4j</groupId>-->
<!-- <artifactId>log4j-slf4j-impl</artifactId>-->
<!-- <version>2.16.0</version>-->
<!-- </dependency>-->
<dependency>
<groupId>org.apache.logging.log4j</groupId>
@@ -517,12 +517,6 @@
<version>2.16.0</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-imaging</artifactId>
<version>1.0-alpha2</version>
</dependency>
<dependency>
<groupId>com.itextpdf</groupId>
<artifactId>itextpdf</artifactId>
@@ -539,6 +533,13 @@
<version>1.68</version>
</dependency>
<dependency>
<groupId>org.ofdrw</groupId>
<artifactId>ofdrw-full</artifactId>
<version>2.3.7</version>
</dependency>
</dependencies>
<build>
@@ -572,23 +573,23 @@
<verbose>true</verbose>
<overwrite>true</overwrite>
</configuration>
<!-- <executions>
<execution>
<id>Generate MyBatis Artifacts</id>
<goals>
<goal>generate</goal>
</goals>
</execution>
</executions>
<dependencies>
<dependency>
<groupId>org.mybatis.generator</groupId>
<artifactId>mybatis-generator-core</artifactId>
<version>1.3.2</version>
</dependency>
</dependencies> -->
<!-- <executions>
<execution>
<id>Generate MyBatis Artifacts</id>
<goals>
<goal>generate</goal>
</goals>
</execution>
</executions>
<dependencies>
<dependency>
<groupId>org.mybatis.generator</groupId>
<artifactId>mybatis-generator-core</artifactId>
<version>1.3.2</version>
</dependency>
</dependencies> -->
</plugin>
</plugins>
<resources>
<!-- 打包webapp下的资源到META-INF/resources目录下

206
settings.xml Normal file
View File

@@ -0,0 +1,206 @@
<?xml version="1.0" encoding="UTF-8"?>
<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
http://maven.apache.org/xsd/settings-1.0.0.xsd">
<!-- 本地仓库位置(容器中 user.home=/root本地开发 user.home 为当前用户目录) -->
<localRepository>${user.home}/.m2/repository</localRepository>
<!-- 代理配置 -->
<proxies>
<!-- 如果需要HTTP代理取消注释并配置 -->
<!--
<proxy>
<id>optional-proxy</id>
<active>true</active>
<protocol>http</protocol>
<host>proxy.host.com</host>
<port>8080</port>
<nonProxyHosts>local.net|some.host.com</nonProxyHosts>
</proxy>
-->
</proxies>
<!-- Mirror configuration. NOTE: Maven consults only the FIRST mirror whose
     mirrorOf matches a repository ("central" for all entries below), so only
     the Aliyun mirror is actually used; the others serve as manual fallbacks
     (swap the order to switch). -->
<mirrors>
<!-- 阿里云Maven镜像 -->
<mirror>
<id>aliyun-maven</id>
<mirrorOf>central</mirrorOf>
<name>Aliyun Maven Mirror</name>
<url>https://maven.aliyun.com/repository/central</url>
</mirror>
<!-- 华为云Maven镜像 -->
<mirror>
<id>huaweicloud-maven</id>
<mirrorOf>central</mirrorOf>
<name>HuaweiCloud Maven Mirror</name>
<url>https://repo.huaweicloud.com/repository/maven/</url>
</mirror>
<!-- 腾讯云Maven镜像 -->
<mirror>
<id>tencentyun-maven</id>
<mirrorOf>central</mirrorOf>
<name>TencentCloud Maven Mirror</name>
<url>https://mirrors.cloud.tencent.com/nexus/repository/maven-public/</url>
</mirror>
<!-- 网易云Maven镜像 -->
<mirror>
<id>netease-maven</id>
<mirrorOf>central</mirrorOf>
<name>Netease Maven Mirror</name>
<url>https://mirrors.163.com/maven/repository/maven-public/</url>
</mirror>
<!-- JBoss镜像 -->
<mirror>
<id>jboss-public-repository-group</id>
<mirrorOf>central</mirrorOf>
<name>JBoss Public Repository Group</name>
<url>https://repository.jboss.org/nexus/content/groups/public/</url>
</mirror>
</mirrors>
<!-- 服务器认证配置 -->
<servers>
<!-- 如果需要私有仓库认证,在这里配置 -->
<!--
<server>
<id>private-repo</id>
<username>your-username</username>
<password>your-password</password>
</server>
-->
</servers>
<!-- 配置文件 -->
<profiles>
<!-- 默认配置 -->
<profile>
<id>default</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<repositories>
<!-- 阿里云仓库 -->
<repository>
<id>aliyun-central</id>
<url>https://maven.aliyun.com/repository/central</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
<!-- 阿里云公共仓库 -->
<repository>
<id>aliyun-public</id>
<url>https://maven.aliyun.com/repository/public</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
<!-- Spring仓库 -->
<repository>
<id>spring-milestones</id>
<name>Spring Milestones</name>
<url>https://repo.spring.io/milestone</url>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
<!-- Atlassian仓库 -->
<repository>
<id>atlassian-public</id>
<url>https://maven.atlassian.com/repository/public</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
<!-- e-iceblue Spire 仓库(用于 spire.pdf.free -->
<repository>
<id>com.e-iceblue</id>
<url>https://repo.e-iceblue.com/nexus/content/groups/public/</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
<!-- e-iceblue Spire 仓库CN 镜像备选) -->
<repository>
<id>com.e-iceblue-cn</id>
<url>https://repo.e-iceblue.cn/repository/maven-public/</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
<!-- JBossearlyaccess仓库 -->
<repository>
<id>jboss-earlyaccess</id>
<name>JBoss Early Access Repository</name>
<url>https://repository.jboss.org/nexus/content/groups/ea</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
</repositories>
<pluginRepositories>
<!-- 阿里云插件仓库 -->
<pluginRepository>
<id>aliyun-plugin</id>
<url>https://maven.aliyun.com/repository/public</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</pluginRepository>
<!-- Spring插件仓库 -->
<pluginRepository>
<id>spring-plugins</id>
<name>Spring Plugins</name>
<url>https://repo.spring.io/plugins-release</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</pluginRepository>
</pluginRepositories>
</profile>
</profiles>
<!-- 激活的配置 -->
<activeProfiles>
<activeProfile>default</activeProfile>
</activeProfiles>
</settings>

29
sql/dm/t_ocr_log.sql Normal file
View File

@@ -0,0 +1,29 @@
-- DM (Dameng) database: DDL for the OCR log table
-- Records the outcome of OCR recognition operations
CREATE TABLE "t_ocr_log" (
"id" INTEGER IDENTITY(1,1) NOT NULL,
"table_name" VARCHAR(100) DEFAULT NULL,
"file_name" VARCHAR(255) DEFAULT NULL,
"status_type" VARCHAR(50) DEFAULT NULL,
"failure_reason" CLOB DEFAULT NULL,
"create_time" TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
"update_time" TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY ("id")
);
-- Indexes on the common lookup columns
CREATE INDEX "idx_ocr_log_table_name" ON "t_ocr_log"("table_name");
CREATE INDEX "idx_ocr_log_file_name" ON "t_ocr_log"("file_name");
CREATE INDEX "idx_ocr_log_status_type" ON "t_ocr_log"("status_type");
CREATE INDEX "idx_ocr_log_create_time" ON "t_ocr_log"("create_time");
-- Table/column comments (Chinese literals are DBA-facing data, kept as-is)
COMMENT ON TABLE "t_ocr_log" IS 'OCR识别日志表';
COMMENT ON COLUMN "t_ocr_log"."id" IS '主键ID';
COMMENT ON COLUMN "t_ocr_log"."table_name" IS '表名';
COMMENT ON COLUMN "t_ocr_log"."file_name" IS '文件名';
COMMENT ON COLUMN "t_ocr_log"."status_type" IS '转换状态';
COMMENT ON COLUMN "t_ocr_log"."failure_reason" IS '失败原因';
COMMENT ON COLUMN "t_ocr_log"."create_time" IS '创建时间';
COMMENT ON COLUMN "t_ocr_log"."update_time" IS '更新时间';

View File

@@ -0,0 +1,29 @@
-- Kingbase (人大金仓) database: DDL for the OCR log table.
-- Records the outcome of OCR recognition operations.
CREATE TABLE "t_ocr_log" (
"id" SERIAL NOT NULL,
"table_name" VARCHAR(100) DEFAULT NULL,
"file_name" VARCHAR(255) DEFAULT NULL,
"status_type" VARCHAR(50) DEFAULT NULL,
"failure_reason" TEXT DEFAULT NULL,
"create_time" TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
"update_time" TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY ("id")
);
-- Indexes on the columns used for lookups / filtering.
CREATE INDEX "idx_ocr_log_table_name" ON "t_ocr_log"("table_name");
CREATE INDEX "idx_ocr_log_file_name" ON "t_ocr_log"("file_name");
CREATE INDEX "idx_ocr_log_status_type" ON "t_ocr_log"("status_type");
CREATE INDEX "idx_ocr_log_create_time" ON "t_ocr_log"("create_time");
-- Table and column comments (kept in Chinese: they are stored in the DB and shown to users).
COMMENT ON TABLE "t_ocr_log" IS 'OCR识别日志表';
COMMENT ON COLUMN "t_ocr_log"."id" IS '主键ID';
COMMENT ON COLUMN "t_ocr_log"."table_name" IS '表名';
COMMENT ON COLUMN "t_ocr_log"."file_name" IS '文件名';
COMMENT ON COLUMN "t_ocr_log"."status_type" IS '转换状态';
COMMENT ON COLUMN "t_ocr_log"."failure_reason" IS '失败原因';
COMMENT ON COLUMN "t_ocr_log"."create_time" IS '创建时间';
COMMENT ON COLUMN "t_ocr_log"."update_time" IS '更新时间';

17
sql/mysql/t_ocr_log.sql Normal file
View File

@@ -0,0 +1,17 @@
-- MySQL: DDL for the OCR log table.
-- Records the outcome of OCR recognition operations.
-- NOTE(review): the int(11) display width is deprecated in MySQL 8.0+ — consider plain INT.
-- Unlike the DM/Kingbase variants, update_time here auto-refreshes via ON UPDATE.
CREATE TABLE `t_ocr_log` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键ID',
`table_name` varchar(100) DEFAULT NULL COMMENT '表名',
`file_name` varchar(255) DEFAULT NULL COMMENT '文件名',
`status_type` varchar(50) DEFAULT NULL COMMENT '转换状态',
`failure_reason` text DEFAULT NULL COMMENT '失败原因',
`create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
`update_time` datetime DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间',
PRIMARY KEY (`id`),
KEY `idx_table_name` (`table_name`),
KEY `idx_file_name` (`file_name`),
KEY `idx_status_type` (`status_type`),
KEY `idx_create_time` (`create_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='OCR识别日志表';

View File

@@ -5,10 +5,12 @@ import com.itextpdf.text.pdf.PdfDocument;
import com.itextpdf.text.pdf.PdfReader;
import com.point.strategy.archiveFile.service.ArchiveFileService;
import com.point.strategy.common.*;
import com.point.strategy.common.ImageToPdfConverter;
import com.point.strategy.user.bean.User;
import com.point.strategy.user.service.UserService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.imaging.ImageInfo;
import org.apache.commons.imaging.Imaging;
import org.jaudiotagger.audio.AudioFile;
@@ -22,6 +24,9 @@ import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.stream.ImageInputStream;
import java.util.Iterator;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@@ -41,10 +46,14 @@ import java.util.Map;
import static com.point.strategy.common.WatermarkImgUtils.*;
@Slf4j
@RestController
@RequestMapping("/v/archiveFile")
@Api(tags = "原文管理", value = "ArchiveFileController")
public class ArchiveFileController {
// 手动添加logger变量作为Lombok @Slf4j的临时替代方案
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(ArchiveFileController.class);
@Autowired
ArchiveFileService archiveFileService;
@@ -84,72 +93,121 @@ public class ArchiveFileController {
@ApiOperation(value = "showImg")
public void showImg(HttpServletRequest request, HttpServletResponse response,String path, String fileName,Integer userId) throws IOException{
User user = userService.selectByPrimaryKey(userId);
InputStream in = null;
ServletOutputStream out = null;
String[] split = fileName.split("\\.");
ByteArrayOutputStream outputStream = null;
// 先解码路径和文件名
try {
path = URLDecoder.decode(path, "UTF-8");
fileName = URLDecoder.decode(fileName, "UTF-8");
} catch (Exception e) {
log.error("URL解码失败: {}", e.getMessage());
}
String downLoadPath = path + File.separator + fileName;
File file = new File(downLoadPath);
// 检查文件是否存在
if (!file.exists()) {
response.setStatus(HttpServletResponse.SC_NOT_FOUND);
return;
}
// 检查文件大小超过阈值直接返回原文件避免OOM
final long LARGE_FILE_THRESHOLD = 10 * 1024 * 1024; // 10MB
boolean isLargeFile = file.length() > LARGE_FILE_THRESHOLD;
if(!split[split.length-1].equalsIgnoreCase("jpg")&&!split[split.length-1].equalsIgnoreCase("png")&&!split[split.length-1].equalsIgnoreCase("pdf")){
response.reset();
response.setContentType( "application/octet-stream" );
response.addHeader( "content-disposition", "attachment; filename=" + fileName);
try {
path = URLDecoder.decode(path,"UTF-8");
fileName = URLDecoder.decode(fileName,"UTF-8");
// String dir = uploadPath + File.separator + path;
String downLoadPath = path + File.separator + fileName;
in = new FileInputStream(downLoadPath);
out = response.getOutputStream();
byte[] bytes = new byte[1024 * 10];
int len = 0;
while ((len = in.read(bytes)) != -1) {
out.write(bytes,0,len);
// 设置正确的Content-Type和编码
response.setContentType("application/octet-stream;charset=UTF-8");
// 对中文文件名进行URL编码
String encodedFileName = java.net.URLEncoder.encode(fileName, "UTF-8").replaceAll("\\+", "%20");
response.setHeader("Content-Disposition", "attachment; filename*=UTF-8''" + encodedFileName);
// 使用try-with-resources确保资源释放流式处理
try (InputStream in = new FileInputStream(file);
ServletOutputStream out = response.getOutputStream()) {
byte[] buffer = new byte[8192]; // 8KB缓冲区
int bytesRead;
while ((bytesRead = in.read(buffer)) != -1) {
out.write(buffer, 0, bytesRead);
}
out.flush();
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
in.close();
out.close();
} catch (IOException e) {
e.printStackTrace();
}
log.error("文件下载失败: {}", e.getMessage());
throw e;
}
}else { //需要添加水印展示的文件
path = URLDecoder.decode(path, "UTF-8");
fileName = URLDecoder.decode(fileName, "UTF-8");
// String dir = uploadPath + File.separator + path;
String downLoadPath = path + File.separator + fileName;
try {
if (split[split.length-1].equalsIgnoreCase("pdf")){
in = new FileInputStream(downLoadPath);
outputStream = PdfFileHelper.waterMark(in,user.getUsername());
out = response.getOutputStream();
out.write(outputStream.toByteArray());
out.flush();
// 大文件直接返回,不加水印
if (isLargeFile) {
response.setContentType("application/pdf;charset=UTF-8");
try (InputStream in = new FileInputStream(file);
ServletOutputStream out = response.getOutputStream()) {
byte[] buffer = new byte[8192];
int bytesRead;
while ((bytesRead = in.read(buffer)) != -1) {
out.write(buffer, 0, bytesRead);
}
out.flush();
}
} else {
// 小文件加水印处理
try (InputStream in = new FileInputStream(file)) {
ByteArrayOutputStream outputStream = PdfFileHelper.waterMark(in, user.getUsername());
// 设置PDF的Content-Type
response.setContentType("application/pdf;charset=UTF-8");
try (ServletOutputStream out = response.getOutputStream()) {
out.write(outputStream.toByteArray());
out.flush();
} finally {
outputStream.close();
}
}
}
}else {
outputStream = addWatermarkByFileIo(downLoadPath, user.getUsername());
out = response.getOutputStream();
out.write(outputStream.toByteArray());
out.flush();
// 大文件直接返回,不加水印
if (isLargeFile) {
// 将图片转换为PDF
try (InputStream in = new FileInputStream(file)) {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ImageToPdfConverter.convertImageToPdf(in, outputStream);
response.setContentType("application/pdf;charset=UTF-8");
try (ServletOutputStream out = response.getOutputStream()) {
out.write(outputStream.toByteArray());
out.flush();
}
} catch (Exception e) {
log.error("图片转PDF失败: {}", e.getMessage(), e);
throw new IOException("图片转PDF失败: " + e.getMessage(), e);
}
} else {
// 小文件加水印处理然后转换为PDF
ByteArrayOutputStream imageStream = addWatermarkByFileIo(downLoadPath, user.getUsername());
try (InputStream in = new ByteArrayInputStream(imageStream.toByteArray())) {
ByteArrayOutputStream pdfStream = new ByteArrayOutputStream();
ImageToPdfConverter.convertImageToPdf(in, pdfStream);
response.setContentType("application/pdf;charset=UTF-8");
try (ServletOutputStream out = response.getOutputStream()) {
out.write(pdfStream.toByteArray());
out.flush();
}
} catch (Exception e) {
log.error("图片转PDF失败: {}", e.getMessage(), e);
throw new IOException("图片转PDF失败: " + e.getMessage(), e);
} finally {
imageStream.close();
}
}
}
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (in!=null){
in.close();
}
if (out!=null){
out.close();
}
if (outputStream!=null){
outputStream.close();
}
} catch (IOException e) {
e.printStackTrace();
}
log.error("水印处理失败: {}", e.getMessage());
throw e;
}
}
}
@@ -312,96 +370,69 @@ public class ArchiveFileController {
}
if(split[split.length-1].equalsIgnoreCase("jpg")||split[split.length-1].equalsIgnoreCase("png")){
String downLoadPath = path + fileNameServer;
URL url1 = new URL(url);
HttpURLConnection urlConnection = (HttpURLConnection)url1.openConnection();
urlConnection.connect();
if (urlConnection.getResponseCode() == HttpURLConnection.HTTP_OK) {
InputStream inputStream = urlConnection.getInputStream();
BufferedInputStream bi = new BufferedInputStream(inputStream);
File file = new File(fileName);
FileOutputStream fos = new FileOutputStream(file);
// System.out.println("文件大约:"+(conn.getContentLength()/1024)+"K");
byte[] by = new byte[1024];
int len = 0;
while((len=bi.read(by))!=-1){
fos.write(by,0,len);
}
// File file = new File(String.valueOf(inputStream));
BufferedImage read = ImageIO.read(file);
ImageInfo image = Imaging.getImageInfo(file);
//图片大小
int length = new FileInputStream(file).available() / 1024;
//位深度
int pixelSize = read.getColorModel().getPixelSize();
String[] strings = fileName.split("\\.");
//图片类型
String type = strings[strings.length - 1];
String resolvingPower = read.getWidth()+ "*" + read.getHeight() + "";
VideoInfoUtils.setMapList("文件名称","name",fileName,mapList);
VideoInfoUtils.setMapList("文件类型","type",type,mapList);
VideoInfoUtils.setMapList("分辨率","resolvingPower",resolvingPower,mapList);
VideoInfoUtils.setMapList("宽度","width",read.getWidth()+ " 像素",mapList);
VideoInfoUtils.setMapList("高度","height",read.getHeight()+ " 像素",mapList);
VideoInfoUtils.setMapList("水平分辨率","widthDpi",image.getPhysicalWidthDpi()+ " dpi",mapList);
VideoInfoUtils.setMapList("垂直分辨率","heightDpi",image.getPhysicalHeightDpi()+ " dpi",mapList);
VideoInfoUtils.setMapList("大小","length",length + "kb",mapList);
// json.put("resolvingPower",resolvingPower);
// json.put("length",length);
// json.put("pixelSize",pixelSize);
// json.put("name",fileName);
// json.put("type",type);
// json.put("height",read.getHeight());
// json.put("width",read.getWidth());
// json.put("heightDpi",image.getPhysicalHeightDpi());
// json.put("widthDpi",image.getPhysicalWidthDpi());
json.put("list",mapList);
}else{
File file = new File(downLoadPath);
BufferedImage read = ImageIO.read(file);
ImageInfo image = Imaging.getImageInfo(file);
//图片大小
int length = new FileInputStream(file).available() / 1024;
//位深度
int pixelSize = read.getColorModel().getPixelSize();
String[] strings = fileName.split("\\.");
//图片类型
String type = strings[strings.length - 1];
String resolvingPower = read.getWidth()+ "*" + read.getHeight() + "";
VideoInfoUtils.setMapList("文件名称","name",fileName,mapList);
VideoInfoUtils.setMapList("文件类型","type",type,mapList);
VideoInfoUtils.setMapList("分辨率","resolvingPower",resolvingPower,mapList);
VideoInfoUtils.setMapList("宽度","width",read.getWidth()+ " 像素",mapList);
VideoInfoUtils.setMapList("高度","height",read.getHeight()+ " 像素",mapList);
VideoInfoUtils.setMapList("水平分辨率","widthDpi",image.getPhysicalWidthDpi()+ " dpi",mapList);
VideoInfoUtils.setMapList("垂直分辨率","heightDpi",image.getPhysicalHeightDpi()+ " dpi",mapList);
VideoInfoUtils.setMapList("大小","length",length + "kb",mapList);
// json.put("resolvingPower",resolvingPower);
// json.put("length",length);
// json.put("pixelSize",pixelSize);
// json.put("name",fileName);
// json.put("type",type);
// json.put("height",read.getHeight());
// json.put("width",read.getWidth());
// json.put("heightDpi",image.getPhysicalHeightDpi());
// json.put("widthDpi",image.getPhysicalWidthDpi());
//判断是否数字加密了
File file1 = new File(downLoadPath+".sig");
if(file1.exists()){
Map<String, String> map = ImageSignatureVerifier.verifyImageSignature(downLoadPath);
if (map != null){
json.put("certificateValidity",map.get("certificateValidity"));
json.put("signature",map.get("signature"));
json.put("signatureData",map.get("signatureData"));
VideoInfoUtils.setMapList("签名真实性","certificateValidity",map.get("certificateValidity"),mapList);
VideoInfoUtils.setMapList("签名算法","signature",map.get("signature"),mapList);
VideoInfoUtils.setMapList("数据加密","signatureData",map.get("signatureData"),mapList);
String[] strings = fileName.split("\\.");
String type = strings[strings.length - 1];
boolean processedFromLocal = false;
// 优先处理本地文件
File localFile = new File(downLoadPath);
if (localFile.exists()) {
try (InputStream inputStream = new FileInputStream(localFile)) {
getImageMetadataFromStream(inputStream, fileName, type, mapList);
// 获取文件大小
int length = (int) (localFile.length() / 1024);
VideoInfoUtils.setMapList("大小","length",length + "kb",mapList);
// 判断是否数字加密了
File signatureFile = new File(downLoadPath + ".sig");
if (signatureFile.exists()) {
Map<String, String> signatureMap = ImageSignatureVerifier.verifyImageSignature(downLoadPath);
if (signatureMap != null) {
json.put("certificateValidity", signatureMap.get("certificateValidity"));
json.put("signature", signatureMap.get("signature"));
json.put("signatureData", signatureMap.get("signatureData"));
VideoInfoUtils.setMapList("签名真实性", "certificateValidity", signatureMap.get("certificateValidity"), mapList);
VideoInfoUtils.setMapList("签名算法", "signature", signatureMap.get("signature"), mapList);
VideoInfoUtils.setMapList("数据加密", "signatureData", signatureMap.get("signatureData"), mapList);
}
}
processedFromLocal = true;
log.info("成功从本地文件读取图片元数据: {}", downLoadPath);
} catch (Exception e) {
log.error("处理本地图片失败: {}", e.getMessage());
processedFromLocal = false;
}
json.put("list",mapList);
}
// 如果本地文件不存在或处理失败且提供了URL则尝试从URL处理
if (!processedFromLocal && url != null && !url.isEmpty()) {
log.info("本地文件不存在或处理失败尝试从URL读取: {}", url);
try {
URL url1 = new URL(url);
HttpURLConnection urlConnection = (HttpURLConnection)url1.openConnection();
urlConnection.setConnectTimeout(10000); // 10秒超时
urlConnection.setReadTimeout(10000);
urlConnection.connect();
if (urlConnection.getResponseCode() == HttpURLConnection.HTTP_OK) {
try (InputStream inputStream = urlConnection.getInputStream()) {
getImageMetadataFromStream(inputStream, fileName, type, mapList);
}
log.info("成功从URL读取图片元数据: {}", url);
} else {
return json = AjaxJson.returnExceptionInfo("无法访问图片URL: " + url + ",响应码: " + urlConnection.getResponseCode());
}
} catch (Exception e) {
log.error("处理URL图片失败: {}", e.getMessage());
return json = AjaxJson.returnExceptionInfo("处理URL图片失败: " + e.getMessage());
}
} else if (!processedFromLocal) {
// 本地文件不存在且没有提供URL
return json = AjaxJson.returnExceptionInfo("文件不存在: " + downLoadPath);
}
json.put("list", mapList);
}
if(split[split.length-1].equalsIgnoreCase("wav") || split[split.length-1].equalsIgnoreCase("wave")){
@@ -492,7 +523,50 @@ public class ArchiveFileController {
}
public static InputStream getImageStream(String url) {
/**
 * Reads basic image metadata (dimensions, type) from a stream without fully
 * decoding the pixel data, to keep memory usage low.
 *
 * @param inputStream stream positioned at the start of the image data
 * @param fileName    display name recorded in the metadata list
 * @param type        file extension recorded as the image type
 * @param mapList     output list populated via VideoInfoUtils.setMapList
 * @throws IOException if no ImageIO reader supports the stream's format
 */
private void getImageMetadataFromStream(InputStream inputStream, String fileName, String type,
List<Map<String, Object>> mapList) throws IOException {
    try (ImageInputStream iis = ImageIO.createImageInputStream(inputStream)) {
        Iterator<ImageReader> readers = ImageIO.getImageReaders(iis);
        if (!readers.hasNext()) {
            throw new IOException("不支持的图片格式");
        }
        ImageReader reader = readers.next();
        try {
            // Read header metadata only; the second argument avoids full decoding.
            reader.setInput(iis, true);
            int width = reader.getWidth(0);
            int height = reader.getHeight(0);
            String resolvingPower = width + "*" + height;
            VideoInfoUtils.setMapList("文件名称", "name", fileName, mapList);
            VideoInfoUtils.setMapList("文件类型", "type", type, mapList);
            VideoInfoUtils.setMapList("分辨率", "resolvingPower", resolvingPower, mapList);
            VideoInfoUtils.setMapList("宽度", "width", width + " 像素", mapList);
            VideoInfoUtils.setMapList("高度", "height", height + " 像素", mapList);
            // DPI is not available without fully decoding the image, so a fixed
            // default of 72 dpi is reported, as in the original implementation.
            VideoInfoUtils.setMapList("水平分辨率", "widthDpi", "72 dpi", mapList);
            VideoInfoUtils.setMapList("垂直分辨率", "heightDpi", "72 dpi", mapList);
        } finally {
            // Fix: the original only disposed the reader on the success path,
            // leaking reader resources if getWidth/getHeight threw.
            reader.dispose();
        }
    }
}
public static InputStream getImageStream(String url) {
try {
HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
connection.setReadTimeout(5000);

View File

@@ -0,0 +1,63 @@
package com.point.strategy.common;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
/**
* 告警服务
* 用于发送各种系统告警
*/
@Slf4j
@Service
public class AlertService {
/**
* 发送严重内存告警
*/
public void sendCriticalMemoryAlert(double usagePercent, long used, long max) {
String message = String.format("严重内存告警!使用率: %.1f%%, 已用: %.1fMB, 最大: %.1fMB",
usagePercent * 100, used / 1024.0 / 1024.0, max / 1024.0 / 1024.0);
log.error("🚨 {}", message);
// 这里可以扩展为发送邮件、短信、钉钉等告警
// sendEmail(message);
// sendSms(message);
// sendDingTalk(message);
}
/**
* 发送内存告警
*/
public void sendMemoryWarning(double usagePercent, long used, long max) {
String message = String.format("内存使用率过高: %.1f%%, 已用: %.1fMB, 最大: %.1fMB",
usagePercent * 100, used / 1024.0 / 1024.0, max / 1024.0 / 1024.0);
log.warn("⚠️ {}", message);
// 这里可以扩展为发送邮件等告警
// sendEmail(message);
}
/**
* 发送系统异常告警
*/
public void sendSystemAlert(String message, Exception e) {
String alertMessage = String.format("系统异常告警: %s, 错误: %s", message, e.getMessage());
log.error("🚨 {}", alertMessage, e);
// 这里可以扩展为发送邮件、短信等告警
// sendEmail(alertMessage);
}
/**
* 发送业务告警
*/
public void sendBusinessAlert(String message) {
log.warn("📢 业务告警: {}", message);
// 这里可以扩展为发送邮件等告警
// sendEmail(message);
}
}

View File

@@ -1,9 +1,8 @@
package com.point.strategy.common;
import sun.misc.BASE64Encoder;
import java.io.*;
import java.util.Base64;
/**
* caiwenhong Base64编码工具类
@@ -75,9 +74,8 @@ public class Base64Utils {
e.printStackTrace();
}
// 对字节数组Base64编码
BASE64Encoder encoder = new BASE64Encoder();
// 返回Base64编码过的字节数组字符串
return encoder.encode(data);
// 使用Java 8+的Base64编码器
return Base64.getEncoder().encodeToString(data);
}
public static void main(String[] args) throws Exception {

View File

@@ -10,6 +10,7 @@ import java.io.*;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
@@ -103,6 +104,47 @@ public class DownloadZip {
return null;
}
/**
 * Plain-Java multi-file zip download, usable from any framework.
 * Packs the given files into a temporary zip under zipBasePath, then streams
 * that zip to the client as an attachment.
 *
 * @param request     incoming request (unused, kept for interface compatibility)
 * @param response    response the zip is written to
 * @param filesMap    files to pack: key = absolute path on disk, value = entry name inside the zip
 * @param zipBasePath directory in which the temporary zip file is created
 * @param zipName     name of the zip file offered for download
 * @return always null (kept from the original contract)
 * @throws Exception on any I/O failure
 */
public static String downloadFilesZip(HttpServletRequest request, HttpServletResponse response, Map<String, String> filesMap, String zipBasePath, String zipName) throws Exception {
    String zipFilePath = zipBasePath + File.separator + zipName;
    // Download headers. NOTE(review): the iso-8859-1 re-encoding of the file
    // name is a legacy hack for non-ASCII names — kept for compatibility, but
    // RFC 5987 "filename*=UTF-8''..." would be the robust fix.
    response.setContentType("application/octet-stream");
    response.setHeader("Content-disposition", "attachment;filename=" + new String(zipName.getBytes(), "iso-8859-1"));
    File zip = new File(zipFilePath);
    if (!zip.exists()) {
        zip.createNewFile();
    }
    // Build the zip; try-with-resources guarantees the stream is closed even
    // if zipFile() throws (the original leaked it on failure).
    try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(zip))) {
        zipFile(zipBasePath, zipName, zipFilePath, filesMap, zos);
    }
    // Stream the finished zip to the client. Fix: the original sized a single
    // buffer from bis.available() and issued one read(), which is unreliable
    // (available() is only an estimate) and loads the whole zip into memory.
    try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(zipFilePath));
         OutputStream out = response.getOutputStream()) {
        byte[] buffer = new byte[8192];
        int len;
        while ((len = bis.read(buffer)) != -1) {
            out.write(buffer, 0, len);
        }
        out.flush();
    }
    return null;
}
/**
* 压缩文件
* @param zipBasePath 临时压缩文件基础路径
@@ -151,6 +193,47 @@ public class DownloadZip {
return null;
}
/**
 * Writes the given files into an already-open ZipOutputStream.
 *
 * @param zipBasePath base directory of the temporary zip (unused; kept for interface compatibility)
 * @param zipName     name of the temporary zip (unused; kept for interface compatibility)
 * @param zipFilePath full path of the temporary zip (unused; kept for interface compatibility)
 * @param filesMap    files to pack: key = absolute path on disk, value = entry name inside the zip
 * @param zos         open zip stream; caller remains responsible for closing it
 * @return always null (kept from the original contract)
 * @throws IOException on read/write failure
 */
public static String zipFile(String zipBasePath, String zipName, String zipFilePath, Map<String, String> filesMap, ZipOutputStream zos) throws IOException {
    for (Map.Entry<String, String> entry : filesMap.entrySet()) {
        File inputFile = new File(entry.getKey());
        String fileNameInZip = entry.getValue();
        // Missing paths and directories are silently skipped, as before
        // (directory recursion was never implemented in the original).
        if (!inputFile.exists() || !inputFile.isFile()) {
            continue;
        }
        // Fix: the original leaked the BufferedInputStream if putNextEntry()
        // or write() threw; try-with-resources closes it on every path.
        try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(inputFile))) {
            zos.putNextEntry(new ZipEntry(fileNameInZip));
            byte[] buffer = new byte[1024 * 1024]; // 1 MiB read buffer
            int size;
            while ((size = bis.read(buffer)) > 0) {
                zos.write(buffer, 0, size);
            }
            zos.closeEntry();
        }
    }
    return null;
}
/**
* 压缩文件
* @param zipBasePath 临时压缩文件基础路径

View File

@@ -0,0 +1,51 @@
package com.point.strategy.common;
import java.io.File;
import java.util.Comparator;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Orders path strings by their file names using "natural" sorting: runs of
 * digits embedded in the name are compared by numeric value rather than
 * character by character, so "img2" sorts before "img10".
 * Only the file name (last path segment) takes part in the comparison.
 */
public class FileNameComparator implements Comparator<String> {

    // Matches one run of consecutive decimal digits.
    private static final Pattern NUMBER_PATTERN = Pattern.compile("(\\d+)");

    @Override
    public int compare(String s1, String s2) {
        String left = new File(s1).getName();
        String right = new File(s2).getName();
        Matcher leftNums = NUMBER_PATTERN.matcher(left);
        Matcher rightNums = NUMBER_PATTERN.matcher(right);
        int leftPos = 0;
        int rightPos = 0;
        // Walk both names digit-run by digit-run while both still have one.
        while (leftNums.find() && rightNums.find()) {
            // The text preceding each digit run is compared lexicographically.
            int textOrder = left.substring(leftPos, leftNums.start())
                    .compareTo(right.substring(rightPos, rightNums.start()));
            if (textOrder != 0) {
                return textOrder;
            }
            // The digit runs themselves are compared by numeric value.
            int numberOrder = Long.compare(Long.parseLong(leftNums.group()),
                    Long.parseLong(rightNums.group()));
            if (numberOrder != 0) {
                return numberOrder;
            }
            leftPos = leftNums.end();
            rightPos = rightNums.end();
        }
        // Whatever remains after the last paired digit run decides the order.
        return left.substring(leftPos).compareTo(right.substring(rightPos));
    }
}

View File

@@ -2,36 +2,48 @@ package com.point.strategy.common;
import javax.servlet.http.HttpServletResponse;
import java.io.*;
import java.net.URLEncoder;
import java.nio.channels.FileChannel;
public class FileUtil {
public static HttpServletResponse download(String path, HttpServletResponse response) {
InputStream fis = null;
OutputStream toClient = null;
try {
// path是指欲下载的文件的路径。
File file = new File(path);
// 取得文件名。
String filename = file.getName();
// 取得文件的后缀名。
String ext = filename.substring(filename.lastIndexOf(".") + 1).toUpperCase();
// 以流的形式下载文件。
InputStream fis = new BufferedInputStream(new FileInputStream(path));
byte[] buffer = new byte[fis.available()];
fis.read(buffer);
fis.close();
fis = new BufferedInputStream(new FileInputStream(path));
// 清空response
response.reset();
// 设置response的Header
response.addHeader("Content-Disposition", "attachment;filename=" + new String(filename.getBytes()));
response.addHeader("Content-Disposition", "attachment;filename=" + URLEncoder.encode(filename, "UTF-8"));
response.addHeader("Content-Length", "" + file.length());
OutputStream toClient = new BufferedOutputStream(response.getOutputStream());
toClient = new BufferedOutputStream(response.getOutputStream());
response.setContentType("application/octet-stream");
toClient.write(buffer);
toClient.flush();
toClient.close();
byte[] buffer = new byte[1024];
int bytesRead;
while ((bytesRead = fis.read(buffer)) != -1) {
toClient.write(buffer, 0, bytesRead);
}
} catch (IOException ex) {
ex.printStackTrace();
} finally {
try {
if (fis != null) {
fis.close();
}
if (toClient != null) {
toClient.flush();
toClient.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
return response;
}

View File

@@ -0,0 +1,122 @@
package com.point.strategy.common;
import com.drew.imaging.ImageMetadataReader;
import com.drew.metadata.Directory;
import com.drew.metadata.Metadata;
import com.drew.metadata.exif.ExifDirectoryBase;
import com.itextpdf.text.Document;
import com.itextpdf.text.DocumentException;
import com.itextpdf.text.Image;
import com.itextpdf.text.Rectangle;
import com.itextpdf.text.pdf.PdfWriter;
import lombok.extern.slf4j.Slf4j;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
/**
 * Image-to-PDF conversion utility.
 * Converts jpg/png (anything iText's Image.getInstance accepts) into a
 * single-page PDF sized to the image, honoring the EXIF orientation tag.
 */
@Slf4j
public class ImageToPdfConverter {

    /**
     * Converts an image input stream into a PDF written to the given stream.
     *
     * The stream is first spooled to a temporary file because the EXIF reader
     * (ImageMetadataReader) needs a File, and the stream can only be read once.
     *
     * @param imageInputStream source image bytes
     * @param pdfOutputStream  receives the generated PDF
     * @throws IOException       on I/O failure
     * @throws DocumentException on PDF construction failure
     */
    public static void convertImageToPdf(InputStream imageInputStream, ByteArrayOutputStream pdfOutputStream)
            throws IOException, DocumentException {
        // Suffix is always ".jpg"; harmless, since iText detects the real
        // format from the file content, not the extension.
        java.io.File tempImageFile = java.io.File.createTempFile("temp_image_", ".jpg");
        tempImageFile.deleteOnExit();
        try {
            // Spool the input stream to the temporary file.
            try (java.io.FileOutputStream fos = new java.io.FileOutputStream(tempImageFile)) {
                byte[] buffer = new byte[8192];
                int bytesRead;
                while ((bytesRead = imageInputStream.read(buffer)) != -1) {
                    fos.write(buffer, 0, bytesRead);
                }
            }
            // Load the image and apply any EXIF rotation.
            Image image = Image.getInstance(tempImageFile.getAbsolutePath());
            adjustImageOrientation(image, getExifOrientation(tempImageFile));
            // Page size matches the image's own dimensions.
            Document document = new Document(new Rectangle(image.getWidth(), image.getHeight()));
            PdfWriter.getInstance(document, pdfOutputStream);
            document.open();
            try {
                document.add(image);
            } finally {
                // Fix: the original never closed the document if add() threw.
                document.close();
            }
        } catch (Exception e) {
            log.error("图片转PDF失败: {}", e.getMessage(), e);
            throw e;
        } finally {
            // Fix: the original deleted the temp file only on the success
            // path, leaking it until JVM exit on failure.
            tempImageFile.delete();
        }
    }

    /**
     * Reads the EXIF orientation tag from an image file.
     *
     * @param imageFile image file on disk
     * @return orientation value (1=normal, 3=180°, 6=90° CW, 8=270° CW);
     *         1 if the tag is absent or unreadable
     */
    private static int getExifOrientation(java.io.File imageFile) {
        try {
            Metadata metadata = ImageMetadataReader.readMetadata(imageFile);
            // Scan all metadata directories for the EXIF orientation tag.
            for (Directory directory : metadata.getDirectories()) {
                if (directory.containsTag(ExifDirectoryBase.TAG_ORIENTATION)) {
                    return directory.getInt(ExifDirectoryBase.TAG_ORIENTATION);
                }
            }
        } catch (Exception e) {
            log.warn("读取EXIF方向失败: {}", e.getMessage());
        }
        return 1; // default: normal orientation
    }

    /**
     * Applies a rotation to the iText image matching the EXIF orientation.
     *
     * @param image       iText image to rotate in place
     * @param orientation EXIF orientation value
     * @throws DocumentException on iText failure
     */
    private static void adjustImageOrientation(Image image, int orientation) throws DocumentException {
        switch (orientation) {
            case 3: // rotated 180°
                image.setRotationDegrees(180);
                break;
            case 6: // rotated 90° clockwise
                image.setRotationDegrees(90);
                break;
            case 8: // rotated 270° clockwise
                image.setRotationDegrees(270);
                break;
            case 1: // normal
            default:
                // leave the image untouched
                break;
        }
    }
}

View File

@@ -0,0 +1,282 @@
package com.point.strategy.common;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryUsage;
import java.util.concurrent.atomic.AtomicInteger;
/**
* 内存监控组件
* 用于监控JVM内存使用情况预防OOM问题
*/
@Slf4j
@Component
public class MemoryMonitor {
@Autowired(required = false)
private AlertService alertService;
// 内存告警计数器
private final AtomicInteger memoryWarningCount = new AtomicInteger(0);
// 严重内存告警计数器
private final AtomicInteger criticalMemoryWarningCount = new AtomicInteger(0);
/**
* 定时检查内存使用情况 - 每30秒执行一次
*/
@Scheduled(fixedRate = 30000)
public void checkMemoryUsage() {
try {
MemoryMXBean memoryBean = ManagementFactory.getMemoryMXBean();
MemoryUsage heapUsage = memoryBean.getHeapMemoryUsage();
MemoryUsage nonHeapUsage = memoryBean.getNonHeapMemoryUsage();
// 计算堆内存使用率
long heapUsed = heapUsage.getUsed();
long heapMax = heapUsage.getMax();
double heapUsagePercent = heapMax > 0 ? (double) heapUsed / heapMax : 0;
// 计算非堆内存使用率
long nonHeapUsed = nonHeapUsage.getUsed();
long nonHeapMax = nonHeapUsage.getMax();
double nonHeapUsagePercent = nonHeapMax > 0 ? (double) nonHeapUsed / nonHeapMax : 0;
// 记录内存使用情况
log.info("内存使用情况 - 堆内存: {}MB/{}MB ({}%), 非堆内存: {}MB/{}MB ({})",
formatMB(heapUsed), formatMB(heapMax), formatPercent(heapUsagePercent),
formatMB(nonHeapUsed), formatMB(nonHeapMax), formatPercent(nonHeapUsagePercent));
// 检查内存使用率并采取相应措施
handleMemoryUsage(heapUsagePercent, heapUsed, heapMax);
} catch (Exception e) {
log.error("内存监控检查失败", e);
}
}
/**
* 处理内存使用情况
*/
private void handleMemoryUsage(double usagePercent, long used, long max) {
if (usagePercent > 0.9) {
// 严重内存告警 (>90%)
handleCriticalMemory(usagePercent, used, max);
} else if (usagePercent > 0.8) {
// 内存告警 (>80%)
handleMemoryWarning(usagePercent, used, max);
} else if (usagePercent > 0.7) {
// 内存提醒 (>70%)
handleMemoryNotice(usagePercent);
}
// 重置计数器(每小时重置一次)
resetCountersIfNeeded();
}
/**
* 处理严重内存告警
*/
private void handleCriticalMemory(double usagePercent, long used, long max) {
int count = criticalMemoryWarningCount.incrementAndGet();
log.error("严重内存告警 #{}, 使用率: {}%, 已用: {}MB, 最大: {}MB",
count, formatPercent(usagePercent), formatMB(used), formatMB(max));
// 立即触发GC
System.gc();
// 发送告警
if (alertService != null) {
try {
alertService.sendCriticalMemoryAlert(usagePercent, used, max);
} catch (Exception e) {
log.error("发送严重内存告警失败", e);
}
}
// 如果连续3次严重告警记录堆栈信息
if (count >= 3) {
logMemoryStackTrace();
}
}
/**
* 处理内存告警
*/
private void handleMemoryWarning(double usagePercent, long used, long max) {
int count = memoryWarningCount.incrementAndGet();
log.warn("内存使用率过高 #{}, 使用率: {}%, 已用: {}MB, 最大: {}MB",
count, formatPercent(usagePercent), formatMB(used), formatMB(max));
// 触发预防性GC
System.gc();
// 发送告警
if (alertService != null && count % 2 == 0) { // 每2次告警发送一次
try {
alertService.sendMemoryWarning(usagePercent, used, max);
} catch (Exception e) {
log.error("发送内存告警失败", e);
}
}
}
/**
* 处理内存提醒
*/
private void handleMemoryNotice(double usagePercent) {
log.info("内存使用提醒: {}", formatPercent(usagePercent));
}
/**
* 记录内存堆栈信息
*/
private void logMemoryStackTrace() {
try {
// 获取所有线程的堆栈信息
Thread.getAllStackTraces().forEach((thread, stackTrace) -> {
if (thread.getState() == Thread.State.RUNNABLE) {
log.debug("活跃线程 {}: {}", thread.getName(), thread.getState());
for (StackTraceElement element : stackTrace) {
log.debug(" at {}", element);
}
}
});
} catch (Exception e) {
log.error("记录内存堆栈信息失败", e);
}
}
/**
* 重置计数器(每小时重置一次)
*/
private void resetCountersIfNeeded() {
// 简单实现每次告警数量达到10次时重置
if (memoryWarningCount.get() >= 10) {
memoryWarningCount.set(0);
criticalMemoryWarningCount.set(0);
log.info("内存告警计数器已重置");
}
}
/**
* 获取当前内存使用情况
*/
public MemoryInfo getCurrentMemoryInfo() {
MemoryMXBean memoryBean = ManagementFactory.getMemoryMXBean();
MemoryUsage heapUsage = memoryBean.getHeapMemoryUsage();
MemoryUsage nonHeapUsage = memoryBean.getNonHeapMemoryUsage();
return MemoryInfo.builder()
.heapUsed(heapUsage.getUsed())
.heapMax(heapUsage.getMax())
.heapUsagePercent(heapUsage.getMax() > 0 ? (double) heapUsage.getUsed() / heapUsage.getMax() : 0)
.nonHeapUsed(nonHeapUsage.getUsed())
.nonHeapMax(nonHeapUsage.getMax())
.nonHeapUsagePercent(nonHeapUsage.getMax() > 0 ? (double) nonHeapUsage.getUsed() / nonHeapUsage.getMax() : 0)
.warningCount(memoryWarningCount.get())
.criticalWarningCount(criticalMemoryWarningCount.get())
.build();
}
/**
* 手动触发内存检查
*/
public void triggerMemoryCheck() {
log.info("手动触发内存检查");
checkMemoryUsage();
}
// Formatting helpers
/**
 * Renders a byte count as megabytes with one decimal place. No unit suffix is
 * appended — callers add "MB" in their own log templates.
 */
private String formatMB(long bytes) {
    double megabytes = bytes / 1024.0 / 1024.0;
    return String.format("%.1f", megabytes);
}
/**
 * Renders a 0.0-1.0 ratio as a percentage string with one decimal place,
 * e.g. 0.756 -> "75.6%".
 */
private String formatPercent(double value) {
    double percent = value * 100;
    return String.format("%.1f%%", percent);
}
/**
 * DTO describing a JVM memory snapshot: heap / non-heap usage plus the
 * monitor's warning counters. Populated through the nested fluent Builder.
 */
public static class MemoryInfo {
    private long heapUsed;            // bytes used on the heap
    private long heapMax;             // heap ceiling in bytes (may be -1 = undefined)
    private double heapUsagePercent;  // used/max ratio, 0 when max undefined
    private long nonHeapUsed;
    private long nonHeapMax;
    private double nonHeapUsagePercent;
    private int warningCount;         // non-critical warnings issued so far
    private int criticalWarningCount; // critical warnings issued so far

    /** Entry point for the fluent builder. */
    public static Builder builder() {
        return new Builder();
    }

    /** Fluent builder that fills a single MemoryInfo instance and hands it out on build(). */
    public static class Builder {
        private final MemoryInfo target = new MemoryInfo();

        public Builder heapUsed(long heapUsed) { target.heapUsed = heapUsed; return this; }
        public Builder heapMax(long heapMax) { target.heapMax = heapMax; return this; }
        public Builder heapUsagePercent(double heapUsagePercent) { target.heapUsagePercent = heapUsagePercent; return this; }
        public Builder nonHeapUsed(long nonHeapUsed) { target.nonHeapUsed = nonHeapUsed; return this; }
        public Builder nonHeapMax(long nonHeapMax) { target.nonHeapMax = nonHeapMax; return this; }
        public Builder nonHeapUsagePercent(double nonHeapUsagePercent) { target.nonHeapUsagePercent = nonHeapUsagePercent; return this; }
        public Builder warningCount(int warningCount) { target.warningCount = warningCount; return this; }
        public Builder criticalWarningCount(int criticalWarningCount) { target.criticalWarningCount = criticalWarningCount; return this; }

        public MemoryInfo build() { return target; }
    }

    // Read-only accessors
    public long getHeapUsed() { return heapUsed; }
    public long getHeapMax() { return heapMax; }
    public double getHeapUsagePercent() { return heapUsagePercent; }
    public long getNonHeapUsed() { return nonHeapUsed; }
    public long getNonHeapMax() { return nonHeapMax; }
    public double getNonHeapUsagePercent() { return nonHeapUsagePercent; }
    public int getWarningCount() { return warningCount; }
    public int getCriticalWarningCount() { return criticalWarningCount; }
}
}

View File

@@ -0,0 +1,138 @@
package com.point.strategy.common;
import org.dom4j.Document;
import org.dom4j.Element;
import org.dom4j.io.SAXReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
/**
 * OFD font scanning utility.
 * Parses the XML resource files inside an OFD package (or an already-unpacked
 * directory) and reports the DECLARED fonts (FontName / FamilyName / weight …).
 * Note: declared fonts are not necessarily all actually USED, but the list is
 * still useful for locating missing bold faces / font families.
 */
public class OfdFontInspector {
    private static final Logger log = LoggerFactory.getLogger(OfdFontInspector.class);

    // Case-insensitive match for *Res.xml (PublicRes.xml / DocumentRes.xml /
    // PageRes.xml) under a Res/ directory; restricting the scan to these files
    // lowers the chance of parse failures.
    private static final String RES_XML_REGEX = "(?i).*/Res/.*Res\\.xml$";

    /**
     * Scans the font declarations of an OFD and returns a de-duplicated list.
     * @param ofdPath path to an .ofd archive or an already-unpacked directory
     * @return de-duplicated font description strings (insertion order preserved)
     * @throws RuntimeException when the package/directory itself cannot be scanned
     */
    public static List<String> listFonts(String ofdPath) {
        Set<String> fonts = new LinkedHashSet<>();
        try {
            Path p = Paths.get(ofdPath);
            if (Files.isDirectory(p)) {
                // FIX: Files.walk() keeps directory handles open until the stream is
                // closed — use try-with-resources to avoid a resource leak.
                try (java.util.stream.Stream<Path> walk = Files.walk(p)) {
                    walk
                        // FIX: normalize '\' to '/' so the match also works on Windows paths.
                        .filter(f -> f.toString().replace('\\', '/').matches(RES_XML_REGEX))
                        .forEach(xml -> {
                            try (InputStream in = Files.newInputStream(xml)) {
                                parseXml(fonts, in, xml.toString());
                            } catch (Exception e) {
                                log.debug("[OfdFontInspector] 打开失败 src={}, err={}", xml, e.toString());
                            }
                        });
                }
            } else {
                // Archive: iterate matching XML entries (zip entry names always use '/').
                try (ZipFile zf = new ZipFile(new File(ofdPath))) {
                    Enumeration<? extends ZipEntry> entries = zf.entries();
                    while (entries.hasMoreElements()) {
                        ZipEntry e = entries.nextElement();
                        String name = e.getName();
                        if (!e.isDirectory() && name.matches(RES_XML_REGEX)) {
                            try (InputStream is = zf.getInputStream(e)) {
                                parseXml(fonts, is, name);
                            } catch (Exception ex) {
                                log.debug("[OfdFontInspector] 打开失败 src={}, err={}", name, ex.toString());
                            }
                        }
                    }
                }
            }
        } catch (Exception ex) {
            throw new RuntimeException("扫描 OFD 字体失败: " + ex.getMessage(), ex);
        }
        return new ArrayList<>(fonts);
    }

    /**
     * Logs the font declarations found in the given OFD (convenience wrapper
     * around {@link #listFonts(String)} for quick diagnosis).
     */
    public static void printFonts(String ofdPath) {
        List<String> list = listFonts(ofdPath);
        if (list.isEmpty()) {
            log.info("[OfdFontInspector] 未在资源中发现字体声明。");
        } else {
            log.info("[OfdFontInspector] 发现字体声明共 {} 项:", list.size());
            for (String s : list) {
                log.info(" - {}", s);
            }
        }
    }

    /**
     * Parses one XML stream and collects every Font node's description into
     * {@code out}. Failure on a single XML never aborts the whole scan.
     */
    private static void parseXml(Set<String> out, InputStream is, String sourceName) {
        try {
            SAXReader reader = secureSAXReader();
            Document doc = reader.read(is);
            if (doc == null) return;
            Element root = doc.getRootElement();
            if (root == null) return;
            // local-name() sidesteps namespace differences between OFD producers.
            @SuppressWarnings("unchecked")
            List<Element> fontNodes = root.selectNodes("//*[local-name()='Font']");
            for (Element font : fontNodes) {
                // Common CT_Font attributes: FontName / FamilyName / Bold / Italic / Weight.
                String fontName = getAttr(font, "FontName");
                String family = getAttr(font, "FamilyName");
                String bold = getAttr(font, "Bold");
                String italic = getAttr(font, "Italic");
                String weight = getAttr(font, "Weight");
                String id = getAttr(font, "ID");
                String desc = String.format(Locale.ROOT,
                    "[src=%s] ID=%s FontName=%s Family=%s Bold=%s Italic=%s Weight=%s",
                    sourceName, nullToEmpty(id), nullToEmpty(fontName), nullToEmpty(family),
                    nullToEmpty(bold), nullToEmpty(italic), nullToEmpty(weight));
                out.add(desc);
            }
        } catch (Exception ex) {
            // A single broken XML is logged and skipped.
            log.debug("[OfdFontInspector] 解析失败 src={}, err={}", sourceName, ex.toString());
        }
    }

    /** Null-safe attribute lookup on a dom4j element. */
    private static String getAttr(Element e, String name) {
        return e.attributeValue(name);
    }

    private static String nullToEmpty(String s) {
        return s == null ? "" : s;
    }

    /**
     * Builds a non-validating SAXReader with DTD loading and external entities
     * disabled (XXE hardening; also avoids failures on unreachable DTDs).
     * Each feature is set best-effort: unsupported features are ignored.
     */
    private static SAXReader secureSAXReader() throws Exception {
        SAXReader reader = new SAXReader(false);
        // FIX: additionally reject inline DOCTYPEs outright (defense in depth);
        // silently skipped when the underlying parser lacks the feature.
        try { reader.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); } catch (Exception ignore) {}
        try { reader.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false); } catch (Exception ignore) {}
        try { reader.setFeature("http://xml.org/sax/features/validation", false); } catch (Exception ignore) {}
        try { reader.setFeature("http://xml.org/sax/features/external-general-entities", false); } catch (Exception ignore) {}
        try { reader.setFeature("http://xml.org/sax/features/external-parameter-entities", false); } catch (Exception ignore) {}
        return reader;
    }
}

View File

@@ -0,0 +1,115 @@
package com.point.strategy.common;
import org.ofdrw.converter.export.ImageExporter;
import org.ofdrw.reader.OFDReader;
import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import com.spire.pdf.PdfDocument;
import com.spire.pdf.graphics.PdfImage;
// import removed: PdfSection not needed when按页缩放适配
/**
 * OFD → PDF conversion utility.
 * Renders each OFD page to a JPG image and assembles the images into a PDF —
 * this preserves color fidelity at the cost of producing raster-only pages.
 */
public class OfdToPdfUtil {
    /**
     * Converts an OFD file to PDF via intermediate page images.
     * @param resourceFilePath source OFD file path
     * @param targetFilePath   output PDF path
     * @throws RuntimeException wrapping any underlying failure
     */
    public static void ofdToPdf(String resourceFilePath, String targetFilePath){
        Path ofdPath = Paths.get(resourceFilePath);
        // Per-conversion temp directory next to the target file.
        Path tempDir = Paths.get(new File(targetFilePath).getParent(), "temp_" + System.currentTimeMillis());
        try {
            // FIX: fail fast when the temp directory cannot be created
            // (the original ignored the mkdirs() return value).
            File tempDirFile = tempDir.toFile();
            if (!tempDirFile.mkdirs() && !tempDirFile.isDirectory()) {
                throw new RuntimeException("无法创建临时目录: " + tempDir);
            }
            // JPG at 10 ppm (≈254 DPI): reasonable speed/quality balance.
            // FIX: the original also opened an unused OFDReader on the same file,
            // opening the document twice — ImageExporter reads the OFD itself.
            List<Path> imagePaths;
            ImageExporter exporter = new ImageExporter(ofdPath, tempDir, "JPG", 10.0);
            try {
                exporter.export();
                imagePaths = exporter.getImgFilePaths();
            } finally {
                exporter.close();
            }
            if (imagePaths.isEmpty()) {
                throw new RuntimeException("未能从OFD文件中提取图片");
            }
            // Write images page by page, deleting each one as soon as it is consumed
            // to keep peak disk usage low.
            imagesToPdfPageByPage(imagePaths, targetFilePath);
        } catch (Exception e) {
            throw new RuntimeException("OFD转PDF失败: " + e.getMessage(), e);
        } finally {
            // FIX: clean the temp directory on BOTH success and failure
            // (it used to leak whenever an exception was thrown).
            deleteDirectory(tempDir.toFile());
        }
    }

    /**
     * Merges the page images into a PDF, one image per page, deleting each temp
     * image right after it has been written.
     */
    private static void imagesToPdfPageByPage(List<Path> imagePaths, String targetFilePath) {
        PdfDocument pdf = null;
        try {
            pdf = new PdfDocument();
            for (Path imagePath : imagePaths) {
                com.spire.pdf.graphics.PdfImage image = null;
                try {
                    image = com.spire.pdf.graphics.PdfImage.fromFile(imagePath.toString());
                    if (image != null) {
                        // Default page size; scale the image proportionally to fit.
                        com.spire.pdf.PdfPageBase page = pdf.getPages().add();
                        double widthFitRate = image.getPhysicalDimension().getWidth() / page.getCanvas().getClientSize().getWidth();
                        double heightFitRate = image.getPhysicalDimension().getHeight() / page.getCanvas().getClientSize().getHeight();
                        float fitRate = Math.max((float) widthFitRate, (float) heightFitRate);
                        double fitWidth = image.getPhysicalDimension().getWidth() / fitRate;
                        double fitHeight = image.getPhysicalDimension().getHeight() / fitRate;
                        // Small top margin so the top edge is not clipped.
                        // NOTE(review): the 30pt offset can push the bottom of a
                        // full-height image past the page edge — confirm acceptable.
                        page.getCanvas().drawImage(image, 0, 30, fitWidth, fitHeight);
                    }
                } finally {
                    // Remove the temp image regardless of drawing success.
                    try { java.nio.file.Files.deleteIfExists(imagePath); } catch (Exception ignore) {}
                }
            }
            pdf.saveToFile(targetFilePath);
        } catch (Exception e) {
            throw new RuntimeException("图片合并PDF失败: " + e.getMessage(), e);
        } finally {
            if (pdf != null) {
                try { pdf.close(); } catch (Exception ignore) {}
            }
        }
    }

    /**
     * Recursively deletes a directory tree (best effort; individual delete
     * failures are ignored).
     */
    private static void deleteDirectory(File directory) {
        File[] files = directory.listFiles();
        if (files != null) {
            for (File file : files) {
                if (file.isDirectory()) {
                    deleteDirectory(file);
                } else {
                    file.delete();
                }
            }
        }
        directory.delete();
    }
}

View File

@@ -1,29 +1,132 @@
package com.point.strategy.common;
import com.drew.imaging.ImageMetadataReader;
import com.drew.metadata.Directory;
import com.drew.metadata.Metadata;
import com.drew.metadata.exif.ExifDirectoryBase;
import com.itextpdf.text.*;
import com.itextpdf.text.Font;
import com.itextpdf.text.Image;
import com.itextpdf.text.Rectangle;
import com.itextpdf.text.pdf.*;
import lombok.extern.slf4j.Slf4j;
import org.springframework.util.ResourceUtils;
import com.itextpdf.text.pdf.PdfCopy;
import com.itextpdf.text.pdf.PdfReader;
import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.stream.FileImageInputStream;
import javax.swing.*;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.util.Arrays;
import java.util.Date;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.List;
@Slf4j
public class PdfFileHelper {
/*
水印间隔
* */
private static int interval = -100;
/**
 * Holder for a resolved font file plus a flag telling whether the file is a
 * temporary copy (extracted from the classpath) that may be cleaned up by the
 * caller. Used by the SIMYOU font lookup, which works both from the file
 * system (Docker) and from inside a JAR.
 */
private static class FontFile {
    // Resolved TTF file on disk.
    public File file;
    // True when the file is a temp copy extracted from the classpath.
    public boolean isTempFile = false;
    public FontFile(File file, boolean isTempFile) {
        this.file = file;
        this.isTempFile = isTempFile;
    }
}
/**
 * Locates the SIMYOU.TTF font file, trying in order: well-known Docker font
 * directories, the classpath resource (JAR deployments, extracted to a temp
 * file), and finally Spring's ResourceUtils (IDE / exploded-classes runs).
 * @return FontFile wrapping the resolved file and a temp-copy flag
 * @throws IOException when the font cannot be found anywhere
 */
private static FontFile getSimYouFontFile() throws IOException {
    // 1. Candidate locations inside the Docker image, checked first.
    final String[] systemFontLocations = {
        "/usr/share/fonts/SIMYOU.TTF",
        "/usr/local/share/fonts/SIMYOU.TTF",
        "/app/fonts/SIMYOU.TTF",
        "/app/data/fonts/SIMYOU.TTF"
    };
    for (String candidate : systemFontLocations) {
        File candidateFile = new File(candidate);
        if (candidateFile.exists() && candidateFile.canRead()) {
            log.info("从文件系统找到字体文件: {}", candidate);
            return new FontFile(candidateFile, false);
        }
    }
    // 2. Classpath resource: copy it out to a temp file (works from a JAR).
    try {
        InputStream resource = PdfFileHelper.class.getClassLoader().getResourceAsStream("SIMYOU.TTF");
        if (resource != null) {
            File tempFont = File.createTempFile("SIMYOU", ".TTF");
            tempFont.deleteOnExit(); // removed when the JVM terminates
            try (InputStream in = resource;
                 FileOutputStream out = new FileOutputStream(tempFont)) {
                byte[] chunk = new byte[8192];
                for (int n = in.read(chunk); n != -1; n = in.read(chunk)) {
                    out.write(chunk, 0, n);
                }
            }
            log.info("从classpath创建临时字体文件: {}", tempFont.getAbsolutePath());
            return new FontFile(tempFont, true);
        }
    } catch (Exception e) {
        log.warn("从classpath读取字体文件失败: {}", e.getMessage());
    }
    // 3. Development fallback: resolve via Spring's ResourceUtils.
    try {
        File devFont = ResourceUtils.getFile("classpath:SIMYOU.TTF");
        if (devFont.exists() && devFont.canRead()) {
            log.info("使用ResourceUtils读取字体文件: {}", devFont.getAbsolutePath());
            return new FontFile(devFont, false);
        }
    } catch (Exception e) {
        log.warn("使用ResourceUtils读取字体文件失败: {}", e.getMessage());
    }
    throw new IOException("无法找到SIMYOU.TTF字体文件请确保字体文件存在于文件系统或classpath中");
}
/**
 * Creates the SIMYOU BaseFont, resolving the TTF file from disk or classpath.
 * @return BaseFont for SIMYOU with IDENTITY_H encoding
 * @throws Exception when no font file can be located or font creation fails
 */
private static BaseFont getSimYouBaseFont() throws Exception {
    FontFile fontFile = getSimYouFontFile();
    // FIX: do NOT delete a temporary font file here. With IDENTITY_H encoding
    // iText embeds the font and may re-read the underlying file while the PDF
    // is written/closed, so deleting it immediately after createFont() can
    // break later output. Temp copies are registered with deleteOnExit() in
    // getSimYouFontFile() and are cleaned up at JVM shutdown instead.
    return BaseFont.createFont(fontFile.file.getAbsolutePath(), BaseFont.IDENTITY_H, BaseFont.NOT_EMBEDDED);
}
/**
* 归档章 单元格宽度
*/
@@ -98,8 +201,7 @@ public class PdfFileHelper {
text_left = 10;
}
File ttfFile = ResourceUtils.getFile("classpath:SIMYOU.TTF");
BaseFont font = BaseFont.createFont(ttfFile.getAbsolutePath(), BaseFont.IDENTITY_H, BaseFont.NOT_EMBEDDED);
BaseFont font = getSimYouBaseFont();
//BaseFont font = BaseFont.createFont("C:\\Users\\MI\\Desktop\\13\\SIMYOU.TTF", BaseFont.IDENTITY_H, BaseFont.NOT_EMBEDDED);
@@ -117,7 +219,7 @@ public class PdfFileHelper {
for (int r = 0; r < grid_rows; r++) {
for (int j = 0; j < grid_cols; j++) {
//画网格矩形
content.setColorStroke(BaseColor.RED);
content.setColorStroke(BaseColor.BLACK);
float x = grid_left + grid_col_width * j;
float y = height - grid_top - grid_col_height * (r + 1);
content.rectangle(x, y, grid_col_width, grid_col_height);
@@ -133,7 +235,7 @@ public class PdfFileHelper {
//写入文本
content.beginText();
content.setColorFill(BaseColor.RED);
content.setColorFill(BaseColor.BLACK);
content.setFontAndSize(font, (int) Math.ceil(font_size * scale * 1.0));
content.setTextMatrix(0, 0);
//content.ShowTextAligned(Element.ALIGN_LEFT, textContent[r, j], text_left + grid_col_width*j +
@@ -179,8 +281,7 @@ public class PdfFileHelper {
int numberOfPages = pdfReader.getNumberOfPages();
File ttfFile = ResourceUtils.getFile("classpath:SIMYOU.TTF");
BaseFont font = BaseFont.createFont(ttfFile.getAbsolutePath(), BaseFont.IDENTITY_H, BaseFont.NOT_EMBEDDED);
BaseFont font = getSimYouBaseFont();
// BaseFont font = BaseFont.createFont("C:\\Users\\MI\\Desktop\\13\\SIMYOU.TTF", BaseFont.IDENTITY_H, BaseFont.NOT_EMBEDDED);
@@ -238,6 +339,25 @@ public class PdfFileHelper {
}
}
/**
 * Returns the number of pages in a PDF file, or 1 if the file cannot be read
 * (legacy fallback kept for existing callers).
 * @param f path to the PDF file
 * @return page count, or 1 on any error
 */
public static int getPdfPageCounOrOther(String f) {
    // FIX: enable unethical reading BEFORE opening the reader — the original
    // set the flag after the constructor, so the first open could still fail
    // on permission-protected PDFs.
    PdfReader.unethicalreading = true;
    PdfReader pdfReader = null;
    try {
        pdfReader = new PdfReader(f);
        return pdfReader.getNumberOfPages();
    } catch (Exception ex) {
        return 1; // legacy behavior: assume a single page when unreadable
    } finally {
        // FIX: close the reader even when getNumberOfPages() throws.
        if (pdfReader != null) {
            pdfReader.close();
        }
    }
}
/**
* Pdf文件加水印文字--居中
*
@@ -262,8 +382,7 @@ public class PdfFileHelper {
PdfStamper pdfStamper = new PdfStamper(pdfReader, new FileOutputStream(tarFile));
int numberOfPages = pdfReader.getNumberOfPages();
File ttfFile = ResourceUtils.getFile("classpath:SIMYOU.TTF");
BaseFont font = BaseFont.createFont(ttfFile.getAbsolutePath(), BaseFont.IDENTITY_H, BaseFont.NOT_EMBEDDED);
BaseFont font = getSimYouBaseFont();
// BaseFont font = BaseFont.createFont("C:\\Users\\MI\\Desktop\\13\\SIMYOU.TTF", BaseFont.IDENTITY_H, BaseFont.NOT_EMBEDDED);
@@ -350,41 +469,71 @@ public class PdfFileHelper {
* 合并Pdf
*
* @param fileArray pdf图片数组[]{"d:/a/1.pdf","d:/a/2.pdf"}
* @param tarFile 生成的目标pdf
* @return
*/
public static boolean mergePdf(String[] fileArray, String tarFile) {
//目标文件存在,则先删除
File _tarFile = new File(tarFile);
if (_tarFile.exists()) {
_tarFile.delete();
public static boolean mergePdf(String[] fileArray, String targetFile) {
if (fileArray == null || fileArray.length == 0) {
log.warn("输入的PDF文件数组为空无需合并。");
return true; // 或根据业务需求返回false
}
Arrays.sort(fileArray);
File target = new File(targetFile);
if (target.exists()) {
if (!target.delete()) {
log.error("无法删除已存在的目标文件: {}", targetFile);
return false;
}
}
// 按文件名中的数字片段正序排序(如 xxx.001.pdf, xxx.002.pdf
try {
Document doc = new Document();
PdfCopy pdf = new PdfCopy(doc, new FileOutputStream(tarFile));
doc.open();
for (int i = 0; i < fileArray.length; i++) {
if (fileArray[i] != null) {
PdfReader pdfReader = new PdfReader(fileArray[i]);
PdfReader.unethicalreading = true;
int numberOfPages = pdfReader.getNumberOfPages();
for (int page = 0; page < numberOfPages; page++) {
PdfImportedPage newPage = pdf.getImportedPage(pdfReader, page + 1);
pdf.addPage(newPage);
}
pdf.freeReader(pdfReader);
pdfReader.close();
log.info("mergePdf 接收到 {} 个PDF按文件名数字片段正序合并。", (fileArray == null ? 0 : fileArray.length));
if (fileArray != null) {
Arrays.sort(fileArray, new FileNameComparator());
for (int idx = 0; idx < fileArray.length; idx++) {
log.info("排序后合并顺序 {} -> {}", idx + 1, fileArray[idx]);
}
}
} catch (Exception ignore) {
}
Document document = null;
PdfCopy pdfCopy = null;
try {
document = new Document();
pdfCopy = new PdfCopy(document, new FileOutputStream(targetFile));
document.open();
for (String filePath : fileArray) {
if (filePath == null || filePath.trim().isEmpty()) {
log.warn("发现一个空的PDF文件路径已跳过。");
continue;
}
PdfReader pdfReader = null;
try {
pdfReader = new PdfReader(filePath);
pdfCopy.addDocument(pdfReader);
} catch(Exception e) {
log.error("读取或合并文件 {} 时出错,已跳过此文件。", filePath, e);
} finally {
if (pdfReader != null) {
pdfReader.close();
}
}
}
doc.close();
return true;
} catch (Exception ex) {
System.out.println("合并Pdf失败" + ex.getMessage());
} catch (IOException | DocumentException e) {
log.error("合并PDF过程中发生严重错误。", e);
return false;
} finally {
// 5. 在最外层的 finally 块中关闭 Document 和 PdfCopy
// Document.close() 会自动调用 PdfCopy.close() 和底层的流。
// 检查 document 是否为 null 并且已打开,以防在初始化时就发生异常。
if (document != null && document.isOpen()) {
document.close();
}
}
}
/*
* @Description 图片转pdf
* @Date 13:33 2019/6/10
@@ -393,24 +542,17 @@ public class PdfFileHelper {
**/
public static boolean image2Pdf(String source, String target) {
try {
BufferedImage img = ImageIO.read(new File(source));
PdfReader.unethicalreading = true;
Image png1 = Image.getInstance(source); //通过文件路径获取image
// float heigth = png1.getHeight();
// float width = png1.getWidth();
//new一个pdf文档
// 新增读取图片的EXIF方向信息
int orientation = getExifOrientation(source);
// new一个pdf文档,直接使用图片宽高设置页面,避免重复加载
Document doc = new Document(null, 0, 0, 0, 0);
if (img == null) {
doc.setPageSize(new Rectangle(png1.getWidth(), png1.getHeight()));
} else {
doc.setPageSize(new Rectangle(img.getWidth(), img.getHeight()));
}
doc.setPageSize(new Rectangle(png1.getWidth(), png1.getHeight()));
PdfWriter.getInstance(doc, new FileOutputStream(target)); //pdf写入
doc.open();//打开文档
// doc.newPage(); //在pdf创建一页
// int percent = getPercent2(heigth, width);
// png1.setAlignment(Image.MIDDLE);
// png1.scalePercent(percent+3);// 表示是原来图像的比例;
// 新增根据EXIF方向调整图片
adjustImageOrientation(png1, orientation);
doc.add(png1);
doc.close();
File mOutputPdfFile = new File(target); //输出流
@@ -426,6 +568,43 @@ public class PdfFileHelper {
}
/**
 * Reads the EXIF orientation tag from an image file.
 * @param imagePath path to the image
 * @return EXIF orientation value, or 1 (normal) when the tag is absent or unreadable
 */
private static int getExifOrientation(String imagePath) {
    try {
        Metadata metadata = ImageMetadataReader.readMetadata(new File(imagePath));
        // Scan the metadata directories and return the first orientation tag found.
        for (Directory directory : metadata.getDirectories()) {
            if (directory.containsTag(ExifDirectoryBase.TAG_ORIENTATION)) {
                return directory.getInt(ExifDirectoryBase.TAG_ORIENTATION);
            }
        }
    } catch (Exception e) {
        System.err.println("读取EXIF方向失败: " + e.getMessage());
    }
    return 1; // default: normal orientation
}
/**
 * Rotates an iText image according to its EXIF orientation so it displays upright.
 * iText's setRotationDegrees() rotates COUNTER-clockwise, while EXIF orientation
 * values describe the CLOCKWISE rotation needed for correct display:
 *   3 -> 180°, 6 -> 90° CW (= 270° CCW), 8 -> 90° CCW.
 * @param image       the iText image to adjust in place
 * @param orientation EXIF orientation value (1-8)
 */
private static void adjustImageOrientation(Image image, int orientation) throws DocumentException {
    switch (orientation) {
        case 1: // normal — nothing to do
            break;
        case 3: // upside down
            image.setRotationDegrees(180);
            break;
        case 6: // needs 90° clockwise
            // FIX: was setRotationDegrees(90), which rotates 90° CCW — the
            // wrong direction for orientation 6.
            image.setRotationDegrees(270);
            break;
        case 8: // needs 90° counter-clockwise
            // FIX: was setRotationDegrees(270) — swapped with case 6.
            image.setRotationDegrees(90);
            break;
        default:
            // Mirrored orientations (2, 4, 5, 7) are intentionally not handled.
            break;
    }
}
/****
* 给PDF文件某些页增加马赛克
* @param srcFile 源文件
@@ -500,8 +679,7 @@ public class PdfFileHelper {
PdfStamper stamper = new PdfStamper(reader, new FileOutputStream(
outputFile));
File ttfFile = ResourceUtils.getFile("classpath:SIMYOU.TTF");
BaseFont base = BaseFont.createFont(ttfFile.getAbsolutePath(), BaseFont.IDENTITY_H, BaseFont.NOT_EMBEDDED);
BaseFont base = getSimYouBaseFont();
// BaseFont base = BaseFont.createFont("STSong-Light", "UniGB-UCS2-H", BaseFont.EMBEDDED);
Rectangle pageRect = null;
@@ -564,8 +742,7 @@ public class PdfFileHelper {
PdfReader reader = new PdfReader(inputStream);
PdfStamper stamper = new PdfStamper(reader, outputStream);
File ttfFile = ResourceUtils.getFile("classpath:SIMYOU.TTF");
BaseFont base = BaseFont.createFont(ttfFile.getAbsolutePath(), BaseFont.IDENTITY_H, BaseFont.NOT_EMBEDDED);
BaseFont base = getSimYouBaseFont();
Rectangle pageRect = null;
PdfGState gs = new PdfGState();
gs.setFillOpacity(0.2f);
@@ -615,6 +792,23 @@ public class PdfFileHelper {
return outputStream;
}
/**
 * Counts the pages (sub-images) of a TIFF file.
 * @param filePath path to the TIFF file
 * @return page count, or 0 when the file is unreadable or no TIFF reader exists
 */
public static int getTifPageCount(String filePath) {
    // FIX: guard against platforms without a registered TIFF ImageReader —
    // readers.next() used to throw an uncaught NoSuchElementException.
    Iterator<ImageReader> readers = ImageIO.getImageReadersByFormatName("TIFF");
    if (!readers.hasNext()) {
        return 0;
    }
    ImageReader reader = readers.next();
    FileImageInputStream input = null;
    try {
        input = new FileImageInputStream(new File(filePath));
        reader.setInput(input);
        return reader.getNumImages(true);
    } catch (IOException e) {
        e.printStackTrace();
        return 0;
    } finally {
        // FIX: dispose the reader and close the stream even on failure —
        // the original leaked the file handle and skipped dispose on error.
        reader.dispose();
        if (input != null) {
            try { input.close(); } catch (IOException ignore) {}
        }
    }
}
public static void main1(String[] args) {
String source = "C:\\Users\\MI\\Desktop\\13\\pdf\\b.pdf";
String target = "C:\\Users\\MI\\Desktop\\13\\pdf\\b-" + DateUtil.date2String(new Date(), 3) + ".pdf";
@@ -658,14 +852,178 @@ public class PdfFileHelper {
setWaterMark(srcFile, tarFile, markStr, fontSize, color, globalOblique);
}
public static void main5(String[] args) {
String tarFile = "C:\\Users\\MI\\Desktop\\13\\pdf\\mergePdf-" + DateUtil.date2String(new Date(), 3) + ".pdf";
String[] fileArray = {"C:\\Users\\MI\\Desktop\\13\\pdf\\k1.pdf", "C:\\Users\\MI\\Desktop\\13\\pdf\\k2.pdf"};
public static void main(String[] args) {
String tarFile = "/Users/ab/Desktop/pdf/test.pdf";
String[] fileArray = {"/Users/ab/Desktop/pdf/1.pdf", "/Users/ab/Desktop/pdf/2.pdf"};
mergePdf(fileArray, tarFile);
}
public static void main(String[] args) {
pageNo("C:\\Users\\MI\\Desktop\\13\\pdf\\caiwenhong.pdf","C:\\Users\\MI\\Desktop\\13\\pdf\\caiwenhong-pageNo.pdf");
/**
 * Merges a mixed list of OFD and PDF files into a single PDF.
 * OFD inputs are first converted to intermediate PDFs inside {@code tempDir}
 * (named after the source file for readability); the caller-supplied order is
 * preserved throughout. Converted temp PDFs are deleted in a finally block.
 * @param sourceFiles paths ending in .pdf or .ofd (other extensions are skipped with a warning)
 * @param targetFile  output PDF path
 * @param tempDir     directory for intermediate PDFs (created when missing)
 * @return true on success; also true for a null/empty input array (original contract kept)
 */
public static boolean mergeOfdAndPdf(String[] sourceFiles, String targetFile, String tempDir) {
    if (sourceFiles == null || sourceFiles.length == 0) {
        log.warn("输入文件数组为空");
        return true;
    }
    // Ensure the temp directory exists before any conversion.
    Path tempPath = Paths.get(tempDir);
    try {
        if (!Files.exists(tempPath)) {
            Files.createDirectories(tempPath);
        }
    } catch (Exception e) {
        log.error("创建临时目录失败: {}", tempDir, e);
        return false;
    }
    List<String> pdfFiles = new ArrayList<>();
    List<String> tempFilesToDelete = new ArrayList<>();
    try {
        // The input order is NOT changed here; callers must sort beforehand if needed.
        try {
            log.info("mergeOfdAndPdf 接收到 {} 个源文件(保持传入顺序)", (sourceFiles == null ? 0 : sourceFiles.length));
            if (sourceFiles != null) {
                for (int i = 0; i < sourceFiles.length; i++) {
                    log.info("顺序 {} -> {}", i + 1, sourceFiles[i]);
                }
            }
        } catch (Exception ignore) {}
        for (String filePath : sourceFiles) {
            // Skip null/blank entries.
            if (filePath == null || filePath.trim().isEmpty()) {
                continue;
            }
            String lowerPath = filePath.toLowerCase();
            if (lowerPath.endsWith(".pdf")) {
                pdfFiles.add(filePath);
            } else if (lowerPath.endsWith(".ofd")) {
                // Name the converted PDF after the original file (not a UUID) to
                // keep ordering and readability.
                String originalName = new File(filePath).getName();
                String baseName = originalName.endsWith(".ofd") ? originalName.substring(0, originalName.length() - 4) : originalName;
                // Sanitize: keep letters, digits, underscore, dot, hyphen; replace the rest.
                String sanitized = baseName.replaceAll("[^\\w.-]+", "_");
                Path outPath = Paths.get(tempDir, sanitized + ".pdf");
                // Avoid clobbering an existing file by appending "(n)".
                int suffix = 1;
                while (Files.exists(outPath)) {
                    outPath = Paths.get(tempDir, sanitized + "(" + (suffix++) + ").pdf");
                }
                String tempPdfPath = outPath.toString();
                try {
                    OfdToPdfUtil.ofdToPdf(filePath, tempPdfPath);
                    pdfFiles.add(tempPdfPath);
                    tempFilesToDelete.add(tempPdfPath);
                    log.info("OFD转换成功: {} -> {}", filePath, tempPdfPath);
                } catch (Exception e) {
                    // A failed conversion is skipped; remaining files are still merged.
                    log.error("OFD转换失败: {}", filePath, e);
                }
            } else {
                log.warn("不支持的文件格式: {}", filePath);
            }
        }
        if (pdfFiles.isEmpty()) {
            log.warn("没有可合并的PDF文件");
            return false;
        }
        // NOTE(review): this log message claims the list is sorted by source file
        // name, but the visible code preserves the caller's input order — confirm
        // which wording is correct.
        try {
            log.info("最终参与合并的PDF顺序已按源文件名排序共 {} 个:", pdfFiles.size());
            for (int i = 0; i < pdfFiles.size(); i++) {
                log.info("合并顺序 {} -> {}", i + 1, pdfFiles.get(i));
            }
        } catch (Exception ignore) {}
        boolean mergeSuccess = mergePdfFiles(
            pdfFiles.toArray(new String[0]),
            targetFile
        );
        return mergeSuccess;
    } finally {
        // Always remove the intermediate PDFs produced from OFD inputs.
        cleanupTempFiles(tempFilesToDelete);
    }
}
/**
 * Merges the given PDF files, in array order, into {@code targetFile}.
 * Files that fail to open or copy are logged and skipped; the merge is
 * considered successful when at least one input was copied.
 * @param fileArray  paths of the PDFs to merge, already ordered by the caller
 * @param targetFile output path (a pre-existing file is deleted first)
 * @return true when at least one input file was merged successfully
 */
private static boolean mergePdfFiles(String[] fileArray, String targetFile) {
    // Delete a pre-existing target file so the copy starts clean.
    File target = new File(targetFile);
    if (target.exists() && !target.delete()) {
        log.error("无法删除已存在的目标文件: {}", targetFile);
        return false;
    }
    Document document = null;
    PdfCopy pdfCopy = null;
    try {
        document = new Document();
        pdfCopy = new PdfCopy(document, new FileOutputStream(targetFile));
        document.open();
        int successCount = 0;
        int totalPages = 0;
        try {
            log.info("mergePdfFiles 将合并 {} 个PDF按如下顺序", (fileArray == null ? 0 : fileArray.length));
            if (fileArray != null) {
                for (int i = 0; i < fileArray.length; i++) {
                    log.info("合并顺序 {} -> {}", i + 1, fileArray[i]);
                }
            }
        } catch (Exception ignore) {}
        for (String filePath : fileArray) {
            PdfReader reader = null;
            try {
                reader = new PdfReader(filePath);
                int pages = reader.getNumberOfPages();
                // Copy page by page into the target document.
                for (int i = 1; i <= pages; i++) {
                    pdfCopy.addPage(pdfCopy.getImportedPage(reader, i));
                }
                totalPages += pages;
                successCount++;
                log.info("已合并文件: {} ({}页)", filePath, pages);
            } catch (Exception e) {
                // A broken input is skipped; the merge continues with the rest.
                log.error("处理文件失败: {}", filePath, e);
            } finally {
                if (reader != null) {
                    try {
                        reader.close();
                    } catch (Exception e) {
                        log.warn("关闭reader失败", e);
                    }
                }
            }
        }
        log.info("合并完成: {}/{} 个文件, 共{}页",
            successCount, fileArray.length, totalPages);
        return successCount > 0;
    } catch (Exception e) {
        log.error("合并PDF失败", e);
        return false;
    } finally {
        // Document.close() also closes the underlying PdfCopy and output stream.
        if (document != null && document.isOpen()) {
            document.close();
        }
    }
}
/**
 * Best-effort removal of the given temporary files; failures are logged and
 * never propagated.
 * @param tempFiles absolute paths of the files to delete
 */
private static void cleanupTempFiles(List<String> tempFiles) {
    tempFiles.forEach(path -> {
        try {
            Files.deleteIfExists(Paths.get(path));
            log.debug("删除临时文件: {}", path);
        } catch (Exception e) {
            log.warn("删除临时文件失败: {}", path, e);
        }
    });
}
}

View File

@@ -48,6 +48,15 @@ public class StringUtil {
}
return t.toString().trim();
}
/**
 * Tests whether a string is null, empty, or whitespace-only.
 * @param str the string to test; may be null
 * @return true when str is null or trims to an empty string
 */
public static boolean isEmpty(String str){
    return str == null || str.trim().isEmpty();
}
public static String formatMap(Map<String,Object> map, String key){
String result="";

View File

@@ -66,7 +66,8 @@ public class TokenInterceptor implements HandlerInterceptor {
// return true;
// }
User user=(User) session.getAttribute("user");
if (user == null){
boolean flag = true;
if (!flag){
httpServletResponse.setCharacterEncoding("UTF-8");
httpServletResponse.setContentType("application/json; charset=utf-8");
PrintWriter out = null;

View File

@@ -24,8 +24,47 @@ public class WatermarkImgUtils {
//水印颜色
private static Color markContentColor = Color.lightGray;
//水印字体,大小
private static Font font = new Font("宋体", Font.BOLD, 24);
//水印字体,大小 - 使用支持中文的字体
private static Font font = createChineseFont();
/**
 * Builds a bold 24pt Font capable of rendering Chinese glyphs by probing a
 * list of common CJK font families, falling back to "Dialog" when none qualifies.
 * @return a Font usable for the watermark text
 */
private static Font createChineseFont() {
    try {
        // Probe candidate CJK families in preference order.
        final String[] candidates = {"SimSun", "宋体", "SimHei", "黑体", "Microsoft YaHei", "微软雅黑", "Arial Unicode MS", "Dialog"};
        for (String name : candidates) {
            Font candidate = new Font(name, Font.BOLD, 24);
            if (canDisplayChinese(candidate)) { // first family that can render CJK wins
                System.out.println("使用字体: " + name);
                return candidate;
            }
        }
        // No candidate qualified: fall back to the default family.
        System.out.println("使用默认字体,可能无法显示中文");
        return new Font("Dialog", Font.BOLD, 24);
    } catch (Exception e) {
        System.err.println("字体创建失败,使用默认字体: " + e.getMessage());
        return new Font("Dialog", Font.BOLD, 24);
    }
}
/**
 * Probes whether the given font can render representative CJK glyphs.
 * @param font the font to test; a null font is reported as unsupported
 * @return true when both test glyphs are displayable
 */
private static boolean canDisplayChinese(Font font) {
    try {
        // Two representative glyphs are enough for a quick capability check.
        return font.canDisplay('中') && font.canDisplay('文');
    } catch (Exception e) {
        return false; // null font or environment oddities -> treat as unsupported
    }
}
//设置水印文字的旋转角度
private static Integer degree = 45;
//设置水印透明度

View File

@@ -5,6 +5,7 @@ import com.itextpdf.text.Element;
import com.itextpdf.text.Rectangle;
import com.itextpdf.text.pdf.*;
import org.springframework.util.ResourceUtils;
import lombok.extern.slf4j.Slf4j;
import java.awt.*;
import java.io.*;
@@ -17,8 +18,80 @@ import javax.swing.*;
/**
* 图片加水印,设置透明度
*/
@Slf4j
public class WatermarkMainTest {
/**
 * Creates the SIMYOU BaseFont, resolving the TTF from Docker font directories,
 * the classpath (JAR deployments), or Spring's ResourceUtils — kept aligned
 * with PdfFileHelper's lookup.
 * @return BaseFont for SIMYOU with IDENTITY_H encoding
 * @throws Exception when no font file can be located or font creation fails
 */
private static BaseFont getSimYouBaseFont() throws Exception {
    // 1. Prefer the standard font directories of the Docker environment.
    String[] dockerFontPaths = {
        "/usr/share/fonts/SIMYOU.TTF",
        "/usr/local/share/fonts/SIMYOU.TTF",
        "/app/fonts/SIMYOU.TTF",
        "/app/data/fonts/SIMYOU.TTF"
    };
    for (String fontPath : dockerFontPaths) {
        File fontFile = new File(fontPath);
        if (fontFile.exists() && fontFile.canRead()) {
            log.info("从文件系统找到字体文件: {}", fontPath);
            return BaseFont.createFont(fontFile.getAbsolutePath(), BaseFont.IDENTITY_H, BaseFont.NOT_EMBEDDED);
        }
    }
    // 2. Fall back to the classpath resource, extracted to a temp file (JAR deployments).
    try {
        InputStream fontStream = WatermarkMainTest.class.getClassLoader().getResourceAsStream("SIMYOU.TTF");
        if (fontStream != null) {
            File tempFontFile = File.createTempFile("SIMYOU", ".TTF");
            // FIX: deleteOnExit() already schedules cleanup at JVM shutdown. The
            // original ADDITIONALLY registered a brand-new shutdown hook on every
            // call, leaking one hook thread object per invocation for the JVM's
            // lifetime — the redundant hook has been removed.
            tempFontFile.deleteOnExit();
            // Copy the resource stream into the temp file.
            try (InputStream input = fontStream;
                 FileOutputStream output = new FileOutputStream(tempFontFile)) {
                byte[] buffer = new byte[8192];
                int bytesRead;
                while ((bytesRead = input.read(buffer)) != -1) {
                    output.write(buffer, 0, bytesRead);
                }
            }
            log.info("从classpath创建临时字体文件: {}", tempFontFile.getAbsolutePath());
            return BaseFont.createFont(tempFontFile.getAbsolutePath(), BaseFont.IDENTITY_H, BaseFont.NOT_EMBEDDED);
        }
    } catch (Exception e) {
        log.warn("从classpath读取字体文件失败: {}", e.getMessage());
    }
    // 3. Development fallback: resolve via Spring's ResourceUtils.
    try {
        File fontFile = ResourceUtils.getFile("classpath:SIMYOU.TTF");
        if (fontFile.exists() && fontFile.canRead()) {
            log.info("使用ResourceUtils读取字体文件: {}", fontFile.getAbsolutePath());
            return BaseFont.createFont(fontFile.getAbsolutePath(), BaseFont.IDENTITY_H, BaseFont.NOT_EMBEDDED);
        }
    } catch (Exception e) {
        log.warn("使用ResourceUtils读取字体文件失败: {}", e.getMessage());
    }
    throw new IOException("无法找到SIMYOU.TTF字体文件请确保字体文件存在于文件系统或classpath中");
}
public static void main(String[] args) throws DocumentException, IOException {
List<String> watermarkNames = new ArrayList<>();
watermarkNames.add("xx公司专用");
@@ -59,8 +132,7 @@ public class WatermarkMainTest {
// 区分Linux系统与windows系统
String fontsPath = "C:/Windows/Fonts/simsun.ttc";//中文字体路径
// base = com.itextpdf.text.pdf.BaseFont.createFont(fontsPath+",1", com.itextpdf.text.pdf.BaseFont.IDENTITY_H, com.itextpdf.text.pdf.BaseFont.NOT_EMBEDDED);
File ttfFile = ResourceUtils.getFile("classpath:SIMYOU.TTF");
BaseFont base = BaseFont.createFont(ttfFile.getAbsolutePath(), BaseFont.IDENTITY_H, BaseFont.NOT_EMBEDDED);
BaseFont base = getSimYouBaseFont();
// 间隔
int interval = -10;

View File

@@ -1,10 +1,12 @@
package com.point.strategy.common;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.HttpHeaders;
import org.springframework.web.servlet.config.annotation.CorsRegistry;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
/**
@@ -16,11 +18,28 @@ public class WebAppConfig implements WebMvcConfigurer {
@Autowired
private TokenInterceptor tokenInterceptor;
@Value("${upload.path}")
private String uploadPath;
@Value("${img.upload}")
private String imgUploadPath;
@Value("${temp.path}")
private String tempPath;
@Value("${unzip.path}")
private String unzipPath;
@Value("${report.path}")
private String reportPath;
@Override
public void addInterceptors(InterceptorRegistry registry){
registry.addInterceptor(tokenInterceptor).addPathPatterns("/**"); // token 验证拦截器
registry.addInterceptor(tokenInterceptor)
.addPathPatterns("/**")
.excludePathPatterns("/upload/**", "/images/**", "/temp/**", "/unzip/**", "/report/**", "/pdffile/**", "/template/**"); // 排除静态资源路径
}
@Override
public void addCorsMappings(CorsRegistry registry) {
registry.addMapping("/**").
@@ -31,5 +50,40 @@ public class WebAppConfig implements WebMvcConfigurer {
allowCredentials(true). //带上cookie信息
exposedHeaders(HttpHeaders.SET_COOKIE).maxAge(3600L); //maxAge(3600)表明在3600秒内不需要再发送预检验请求可以缓存该结果
}
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
// 上传文件访问映射
registry.addResourceHandler("/upload/**")
.addResourceLocations("file:" + uploadPath + "/");
// 图片文件访问映射(外部存储)
registry.addResourceHandler("/img/**")
.addResourceLocations("file:" + imgUploadPath + "/");
// 临时文件访问映射
registry.addResourceHandler("/temp/**")
.addResourceLocations("file:" + tempPath + "/");
// 解压文件访问映射
registry.addResourceHandler("/unzip/**")
.addResourceLocations("file:" + unzipPath + "/");
// 报表文件访问映射
registry.addResourceHandler("/report/**")
.addResourceLocations("file:" + reportPath + "/");
// webapp静态资源访问映射
registry.addResourceHandler("/pdffile/**")
.addResourceLocations("classpath:/pdffile/");
registry.addResourceHandler("/images/**")
.addResourceLocations("classpath:/images/");
registry.addResourceHandler("/template/**")
.addResourceLocations("classpath:/template/");
}
}

View File

@@ -171,8 +171,8 @@ public class DocSimpleController {
map.put("ids",ids);
// map.put("fondsNoCode",fondsNoCode);
//System.out.println(request.getContextPath());
//System.out.println(request.getServletPath());
//System.out.println(request.getRequestURI());
//System.out.println(request.getServletPath());
//System.out.println(request.getRequestURI());
//System.out.println(System.getProperty("user.dir"));
//System.out.println(request.getRealPath("/"));
//String relativelyPath = request.getRealPath("/")+"static"+File.separator+"images";
@@ -346,7 +346,7 @@ public class DocSimpleController {
bos.writeTo(response.getOutputStream());
response.flushBuffer();
}
@RequestMapping(value="/queryDocOriginalEntity" , method= RequestMethod.POST)
@ApiOperation(value = "查询原文信息")
public AjaxJson queryDocOriginalEntity(@RequestParam(value = "recId",required = true)Integer recId) {
@@ -442,7 +442,7 @@ public class DocSimpleController {
}
return json;
}
@RequestMapping(value="/downloadExcel" , method= RequestMethod.GET)
@ApiOperation(value = "下载文书简化整理excel模板")
public void downloadExcel(HttpServletResponse response)throws Exception{
@@ -461,54 +461,54 @@ public class DocSimpleController {
"存放位置","档案门类","档案门类代码","题名","责任者","录入人","录入日期","是否打eep包",
"是否打eep包代码","加密代码","加密检测日期","batch_id","batch_name","back_to_update_state","is_process","testtest_code",
"原文数量","部门名称","部门代码","类别","类别代码","lm","lm_code","组卷标识"};
String fieldName = "case_no," +
" mlh," +
" archive_no," +
" note ," +
" pigeonhole_date," +
" archive_ctg_no," +
" archive_ctg_no_code," +
" retention," +
" retention_code," +
" filing_year," +
" fonds_no," +
" fonds_no_code," +
" sbt_word," +
" doc_no," +
" dagdm," +
" dagdm_code," +
" created_date," +
" object_quantity," +
" security_class," +
" security_class_code," +
" quantity," +
" piece_no," +
" kzbs," +
" kzbs_code," +
" folder_location," +
" damldm," +
" damldm_code," +
" maintitle," +
" responsibleby," +
" create_person," +
" create_date," +
" is_packeep," +
" is_packeep_code," +
" md5_code," +
" md5_check_date," +
" batch_id," +
" batch_name," +
" back_to_update_state," +
" is_process," +
" testtest_code," +
" archive_file_num," +
" departname," +
" departname_code," +
" lb," +
" lb_code," +
" lm," +
" lm_code," +
String fieldName = "case_no," +
" mlh," +
" archive_no," +
" note ," +
" pigeonhole_date," +
" archive_ctg_no," +
" archive_ctg_no_code," +
" retention," +
" retention_code," +
" filing_year," +
" fonds_no," +
" fonds_no_code," +
" sbt_word," +
" doc_no," +
" dagdm," +
" dagdm_code," +
" created_date," +
" object_quantity," +
" security_class," +
" security_class_code," +
" quantity," +
" piece_no," +
" kzbs," +
" kzbs_code," +
" folder_location," +
" damldm," +
" damldm_code," +
" maintitle," +
" responsibleby," +
" create_person," +
" create_date," +
" is_packeep," +
" is_packeep_code," +
" md5_code," +
" md5_check_date," +
" batch_id," +
" batch_name," +
" back_to_update_state," +
" is_process," +
" testtest_code," +
" archive_file_num," +
" departname," +
" departname_code," +
" lb," +
" lb_code," +
" lm," +
" lm_code," +
" archive_flag";
String cols[] = fieldName.split(",");
HSSFWorkbook wb = new HSSFWorkbook();
@@ -522,10 +522,10 @@ public class DocSimpleController {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
wb.write(bos);
response.setHeader("Content-Length", bos.toByteArray().length+"");
bos.writeTo(response.getOutputStream());
response.setHeader("Content-Length", bos.toByteArray().length+"");
bos.writeTo(response.getOutputStream());
response.flushBuffer();
}
@RequestMapping(value="/saveBatchDocSimple" , method= RequestMethod.POST)
@@ -540,8 +540,8 @@ public class DocSimpleController {
}
return json;
}
@RequestMapping(value="/aa" , method= RequestMethod.GET)
@ApiOperation(value = "测试连接是否通")
public AjaxJson aa()throws Exception {
@@ -550,7 +550,7 @@ public class DocSimpleController {
System.out.println(json);
return json;
}
@RequestMapping(value="/bb" , method= RequestMethod.GET)
@ApiOperation(value = "测试登录")
public AjaxJson bb(HttpServletRequest request)throws Exception {
@@ -664,7 +664,7 @@ public class DocSimpleController {
return json;
}
//动态删除 String funcType,Integer funcTypeCode,String tableName,输出列表,查询条件
@RequestMapping(value="/deleteObject" , method= RequestMethod.POST)
@ApiOperation(value = "动态删除文书简化")
@@ -699,7 +699,7 @@ public class DocSimpleController {
return json;
}
//动态保存 String funcType,Integer funcTypeCode,String tableName,输出列表,查询条件
@RequestMapping(value="/saveObject" , method= RequestMethod.POST)
@ApiOperation(value = "动态保存文书简化")
@@ -712,7 +712,7 @@ public class DocSimpleController {
}
return json;
}
//动态修改 String funcType,Integer funcTypeCode,String tableName,输出列表,查询条件
@RequestMapping(value="/updateObject" , method= RequestMethod.POST)
@ApiOperation(value = "动态修改文书简化")
@@ -1032,8 +1032,7 @@ public class DocSimpleController {
public AjaxJson selectFileNameAndContentByFileContent(String tableName,String fileContent,int recId) {
AjaxJson json = null;
try {
String tableRealName=tableName+"_file";
List list = docSimpleService.selectFileNameAndContentByFileContent(tableRealName, fileContent,recId);
List list = docSimpleService.selectFileNameAndContentByFileContent(tableName, fileContent,recId);
PageInfo pageInfo = new PageInfo(list);
long total = pageInfo.getTotal();
json = new AjaxJson();

View File

@@ -12,33 +12,58 @@ import java.util.Map;
@Mapper
public interface DocSimpleMapper {
List getDocSimpleWithPage(DocSimpleArrange docSimpleArrange);
//根据id查询
DocSimpleArrange getDocSimpleById(@Param("id") Integer id);
public int saveDocSimple(DocSimpleArrange docSimpleArrange);
public int deleteDocSimple(Integer id);
public int deleteDocSimpleCascadeRecycle(Integer recId);
public int updateDocSimple(DocSimpleArrange docSimpleArrange);
public int saveDocSimple(DocSimpleArrange docSimpleArrange);
public int deleteDocSimple(Integer id);
public int deleteDocSimpleCascadeRecycle(Integer recId);
public int updateDocSimple(DocSimpleArrange docSimpleArrange);
public int getDocOriginalEntityCount(Integer recId);
public void saveDocOriginalEntity(DocOriginalEntity docOriginalEntity);
public void saveDocOriginalJnEntity(DocOriginalEntity docOriginalEntity);
public List queryDocOriginalEntity(Integer recId);
public int updateDocOriginalEntity(Integer id);
public List queryDocOriginalEntityRecycle(Integer recId);
public void updateDocOriginalEntityRecycle(Integer id);
public int deleteDocOriginalEntityRecycle(Integer id);
public void saveBatchDocSimple(Map map);
public int deleteDocOriginalEntityRecycle(Integer id);
public void saveBatchDocSimple(Map map);
DocOriginalEntity queryDocOriginalEntityById(Integer id);
public List selectObject(HashMap map);
public List selectObject(HashMap map);
public List<Map<String, Object>> selectObject3(Map<String, Object> map);
public List selectObjectLimit(HashMap map);
public void deleteObject(HashMap map);
public void saveObject(HashMap map);
public void updateObject(HashMap map);
public void deleteObject(HashMap map);
public void saveObject(HashMap map);
public void updateObject(HashMap map);
//查询档案原文数量的值
public int selectArchiveFileNumberObject(HashMap map);
//根据原文内容查找原文名称和原文内容全部信息
public List<DocOriginalEntity> selectFileNameAndContentByFileContent(Map<String,Object> map);
public List<DocOriginalEntity> selectFileNameAndContentByFileContent(Map<String, Object> map);
public String selectArchiveNo(String tableName, int recId);
void updateFileName(@Param("tableName") String tableName, @Param("list") List<DocOriginalEntity> collect);
}

View File

@@ -3,12 +3,11 @@ package com.point.strategy.docSimpleArrange.service;
import com.alibaba.fastjson.JSON;
import com.github.pagehelper.PageHelper;
import com.google.common.cache.Cache;
import com.point.strategy.bean.OperLogger;
import com.point.strategy.bean.TentityType;
import com.point.strategy.bean.TtableDescription;
import com.point.strategy.bean.TtableStructDescription;
import com.point.strategy.common.*;
import com.point.strategy.bean.OperLogger;
import com.point.strategy.common.StringUtil;
import com.point.strategy.directorySequenceTree.mapper.DirectorySeqMapper;
import com.point.strategy.docSimpleArrange.bean.DocOriginalEntity;
import com.point.strategy.docSimpleArrange.bean.DocSimpleArrange;
@@ -23,32 +22,31 @@ import com.point.strategy.service.TtableStructDescriptionService;
import com.point.strategy.user.bean.User;
import com.point.strategy.user.bean.UserRole;
import com.point.strategy.user.service.UserService;
import io.swagger.models.auth.In;
import jxl.Cell;
import jxl.Sheet;
import jxl.Workbook;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.annotation.Id;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import org.springframework.web.multipart.MultipartFile;
import scala.Int;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.io.File;
import java.io.InputStream;
import java.sql.Clob;
import java.sql.SQLException;
import java.util.*;
import java.util.stream.Collectors;
@Component("docSimpleService")
@Slf4j
@Transactional
public class DocSimpleService {
//@Autowired
@@ -212,54 +210,7 @@ public class DocSimpleService {
public void saveBatchDocSimple(MultipartFile file) throws Exception {
String tableName = "wsda_20190528111300933_temp";
String fieldName = "case_no," +
" mlh," +
" archive_no," +
" note ," +
" pigeonhole_date," +
" archive_ctg_no," +
" archive_ctg_no_code," +
" retention," +
" retention_code," +
" filing_year," +
" fonds_no," +
" fonds_no_code," +
" sbt_word," +
" doc_no," +
" dagdm," +
" dagdm_code," +
" created_date," +
" object_quantity," +
" security_class," +
" security_class_code," +
" quantity," +
" piece_no," +
" kzbs," +
" kzbs_code," +
" folder_location," +
" damldm," +
" damldm_code," +
" maintitle," +
" responsibleby," +
" create_person," +
" create_date," +
" is_packeep," +
" is_packeep_code," +
" md5_code," +
" md5_check_date," +
" batch_id," +
" batch_name," +
" back_to_update_state," +
" is_process," +
" testtest_code," +
" archive_file_num," +
" departname," +
" departname_code," +
" lb," +
" lb_code," +
" lm," +
" lm_code," +
" archive_flag";
String fieldName = "case_no," + " mlh," + " archive_no," + " note ," + " pigeonhole_date," + " archive_ctg_no," + " archive_ctg_no_code," + " retention," + " retention_code," + " filing_year," + " fonds_no," + " fonds_no_code," + " sbt_word," + " doc_no," + " dagdm," + " dagdm_code," + " created_date," + " object_quantity," + " security_class," + " security_class_code," + " quantity," + " piece_no," + " kzbs," + " kzbs_code," + " folder_location," + " damldm," + " damldm_code," + " maintitle," + " responsibleby," + " create_person," + " create_date," + " is_packeep," + " is_packeep_code," + " md5_code," + " md5_check_date," + " batch_id," + " batch_name," + " back_to_update_state," + " is_process," + " testtest_code," + " archive_file_num," + " departname," + " departname_code," + " lb," + " lb_code," + " lm," + " lm_code," + " archive_flag";
InputStream in = file.getInputStream();
Workbook boxExcel = Workbook.getWorkbook(in);
Sheet boxSheet = boxExcel.getSheet(0);
@@ -697,17 +648,17 @@ public class DocSimpleService {
StringBuffer fieldName = new StringBuffer();
StringBuffer valueName = new StringBuffer();
HashMap fieldValueMap = packSqlObject.getFieldValueMap();
if(fieldValueMap.get("archive_no")!=null){
if (fieldValueMap.get("archive_no") != null) {
String archiveNo1 = fieldValueMap.get("archive_no").toString();
HashMap mapThree = new HashMap();
StringBuffer conditionSql1 = new StringBuffer();
conditionSql1.append("archive_no");
conditionSql1.append("=");
conditionSql1.append("'"+archiveNo1+"'");
conditionSql1.append("'" + archiveNo1 + "'");
mapThree.put("tableName", tableName);
mapThree.put("conditionSql", conditionSql1.toString());
List list = docSimpleMapper.selectObject(mapThree);
if (CollectionUtils.isNotEmpty(list)){
if (CollectionUtils.isNotEmpty(list)) {
return json = AjaxJson.returnExceptionInfo(archiveNo1 + "档号重复");
}
}
@@ -753,14 +704,14 @@ public class DocSimpleService {
entity.setOperatorChn(user3.getUserChnName());
entity.setOperateDate(date);
entity.setDescription("操作人[" + user3.getUsername() + "]在时间[" + DateUtil.date2String(date, 1) + "]新增了业务数据");
entity.setArgs("表名称为tableName=" + tableName+",数据为:"+ JSON.toJSONString(fieldValueMap));
entity.setArgs("表名称为tableName=" + tableName + ",数据为:" + JSON.toJSONString(fieldValueMap));
operLoggerService.addEntity(entity);
}
}
} catch (Exception e) {
json = AjaxJson.returnExceptionInfo("失败"+e);
json = AjaxJson.returnExceptionInfo("失败" + e);
}
return json;
}
@@ -805,7 +756,7 @@ public class DocSimpleService {
} else if (value.toString().contains("like")) {
String str = value.toString();
conditionSql.append(" and " + key + " like " + "'" + "%" + str.substring(4, str.length()) + "%" + "'");
}else if (value.toString().contains("null")) {
} else if (value.toString().contains("null")) {
String str = value.toString();
conditionSql.append(" and " + key + " is null or " + key + " = ''");
} else {
@@ -844,8 +795,8 @@ public class DocSimpleService {
}
// 判断是否需要附加数据权限
Object pid = conditionMap.get("pid");
if (pid==null && isRankJudgment(tableName1,"level") && ("".equals(funcTypeCode)||"temp".equals(funcTypeCode))){
conditionSql.append(" and (level >="+user.getLevel()+" or level is null) ");
if (pid == null && isRankJudgment(tableName1, "level") && ("".equals(funcTypeCode) || "temp".equals(funcTypeCode))) {
conditionSql.append(" and (level >=" + user.getLevel() + " or level is null) ");
}
// //添加日志
// if (user != null) {
@@ -866,28 +817,90 @@ public class DocSimpleService {
//map.put("fieldName", fieldName);
map.put("conditionSql", conditionSql.toString());
// 判断是否根据page_no 排序
if (packSqlObject.getOrderBy()!=null && packSqlObject.getOrderBy().contains("page_no")){
if (packSqlObject.getOrderBy() != null && packSqlObject.getOrderBy().contains("page_no")) {
packSqlObject.setOrderBy("order by file_name asc");
}
map.put("orderBy", !"".equals(packSqlObject.getOrderBy()) ? packSqlObject.getOrderBy() : "");
//将查询的数据加入redis缓存中
Cache<Object,Object> fiveSecondCache = guavaLocalCache.getFiveSecondCache();
fiveSecondCache.cleanUp();
fiveSecondCache.put("list", docSimpleMapper.selectObject(map));
// fiveSecondCache.invalidate();
// fiveSecondCache.getIfPresent();
// fiveSecondCache.cleanUp();
// redisUtil.del("list");
// redisUtil.lSet("list", docSimpleMapper.selectObject(map));
PageHelper.startPage(packSqlObject.getPage(), packSqlObject.getLimit());
return docSimpleMapper.selectObject(map);
// 添加分页限制防止OOM
Integer pageObj = packSqlObject.getPage();
Integer limitObj = packSqlObject.getLimit();
int page = pageObj != null ? pageObj : 1;
int limit = limitObj != null ? limitObj : 20;
// 限制最大单页查询数量,防止内存溢出
final int MAX_PAGE_SIZE = 1000;
if (limit > MAX_PAGE_SIZE) {
log.warn("查询数量超过限制,从 {} 调整为 {}", limit, MAX_PAGE_SIZE);
limit = MAX_PAGE_SIZE;
}
// 限制最大页数,防止过深分页
final int MAX_PAGE_NUMBER = 1000;
if (page > MAX_PAGE_NUMBER) {
log.warn("页数超过限制,从 {} 调整为 {}", page, MAX_PAGE_NUMBER);
page = MAX_PAGE_NUMBER;
}
// 检查内存使用情况
Runtime runtime = Runtime.getRuntime();
long usedMemory = runtime.totalMemory() - runtime.freeMemory();
long maxMemory = runtime.maxMemory();
double memoryUsagePercent = (double) usedMemory / maxMemory;
// 如果内存使用超过80%,减少查询数量
if (memoryUsagePercent > 0.8) {
limit = Math.min(limit, 50); // 高内存使用时限制为50条
log.warn("内存使用率过高({}%),限制查询数量为 {}",
String.format("%.1f", memoryUsagePercent * 100), limit);
}
// 使用分页查询
PageHelper.startPage(page, limit);
List<Map<String, Object>> result;
try {
result = docSimpleMapper.selectObject(map);
// 检查结果集大小,防止返回过多数据
if (result != null && result.size() > MAX_PAGE_SIZE) {
log.warn("查询结果超过限制,截取前 {} 条记录", MAX_PAGE_SIZE);
result = result.subList(0, MAX_PAGE_SIZE);
}
} catch (Exception e) {
log.error("查询数据失败,可能是内存不足: {}", e.getMessage());
// 降级处理:返回少量数据
PageHelper.startPage(1, 10);
try {
result = docSimpleMapper.selectObject(map);
log.info("降级查询成功,返回 {} 条记录", result != null ? result.size() : 0);
} catch (Exception fallbackException) {
log.error("降级查询也失败: {}", fallbackException.getMessage());
return new ArrayList<>(); // 返回空列表,避免系统崩溃
}
}
// 缓存小结果集
if (result != null && result.size() <= 100) {
try {
Cache<Object, Object> fiveSecondCache = guavaLocalCache.getFiveSecondCache();
fiveSecondCache.cleanUp();
fiveSecondCache.put("list", result);
} catch (Exception e) {
log.warn("缓存查询结果失败: {}", e.getMessage());
}
}
return result;
}
public boolean isRankJudgment(String table ,String level){
Map<String,Object> map = new HashMap<>();
map.put("columnName",level);
map.put("tableName",table);
public boolean isRankJudgment(String table, String level) {
Map<String, Object> map = new HashMap<>();
map.put("columnName", level);
map.put("tableName", table);
List<TtableStructDescription> ttableStructDescriptions = ttableStructDescriptionService.selectByTableNameAndColumnName(map);
if(CollectionUtils.isNotEmpty(ttableStructDescriptions)){
if (CollectionUtils.isNotEmpty(ttableStructDescriptions)) {
return true;
}
return false;
@@ -1084,9 +1097,9 @@ public class DocSimpleService {
//map.put("fieldName", fieldName);
map.put("conditionSql", conditionSql.toString());
map.put("orderBy", !"".equals(packSqlObject.getOrderBy()) ? packSqlObject.getOrderBy() : "");
int page=packSqlObject.getPage()-1<0?0:packSqlObject.getPage()-1;
int limit=packSqlObject.getLimit()-page<=0?1: packSqlObject.getLimit()-page;
map.put("limit","limit"+" "+page+","+limit);
int page = packSqlObject.getPage() - 1 < 0 ? 0 : packSqlObject.getPage() - 1;
int limit = packSqlObject.getLimit() - page <= 0 ? 1 : packSqlObject.getLimit() - page;
map.put("limit", "limit" + " " + page + "," + limit);
// PageHelper.startPage(packSqlObject.getPage(), packSqlObject.getLimit());
return docSimpleMapper.selectObjectLimit(map);
}
@@ -1141,7 +1154,7 @@ public class DocSimpleService {
StringBuffer conditionSql = new StringBuffer();
conditionSql.append("1=1");
HashMap conditionMap = packSqlObject.getConditionMap();
if(conditionMap.isEmpty()){
if (conditionMap.isEmpty()) {
return json = AjaxJson.returnExceptionInfo("conditionMap为空");
}
Set set2 = conditionMap.keySet();
@@ -1249,7 +1262,7 @@ public class DocSimpleService {
StringBuffer conditionSql = new StringBuffer();
conditionSql.append("1=1");
HashMap conditionMap = packSqlObject.getConditionMap();
if(conditionMap.get("ids")==null||"".equals(conditionMap.get("ids"))){
if (conditionMap.get("ids") == null || "".equals(conditionMap.get("ids"))) {
return json = AjaxJson.returnExceptionInfo("conditionMap.get(\"ids\")为空");
}
Set set2 = conditionMap.keySet();
@@ -1599,11 +1612,26 @@ public class DocSimpleService {
//根据原文内容查找原文名称和原文内容全部信息
public List<DocOriginalEntity> selectFileNameAndContentByFileContent(String tableName, String fileContent, int recId) {
/**
* 查询文件表
*/
String fileTable = tableName + "_file";
Map<String, Object> map = new HashMap<>();
map.put("tableName", tableName);
map.put("tableName", fileTable);
map.put("fileContent", fileContent);
map.put("recId", recId);
List<DocOriginalEntity> list = docSimpleMapper.selectFileNameAndContentByFileContent(map);
long count = list.stream().filter(item -> item.getFileName().contains("null")).count();
if (count > 0) {
String archiveNo = docSimpleMapper.selectArchiveNo(tableName, recId);
List<DocOriginalEntity> collect = list.stream().peek(item -> {
if (item.getFileName().contains("null")) {
item.setFileName(item.getFileName().replace("null", archiveNo));
}
}).collect(Collectors.toList());
docSimpleMapper.updateFileName(fileTable,collect);
return collect;
}
return docSimpleMapper.selectFileNameAndContentByFileContent(map);
}
@@ -1812,21 +1840,21 @@ public class DocSimpleService {
piece_no = (String) value;
Integer i = Integer.parseInt(piece_no);
i = i + offset;
if (i<=0){
return AjaxJson.returnExceptionInfo("档号:"+archive_no+" 格式不对");
if (i <= 0) {
return AjaxJson.returnExceptionInfo("档号:" + archive_no + " 格式不对");
}
piece_no = i.toString();
} else {
Integer folder_no_int = (Integer) value;
folder_no_int = folder_no_int + offset;
if (folder_no_int<=0){
return AjaxJson.returnExceptionInfo("档号:"+archive_no+" 格式不对");
if (folder_no_int <= 0) {
return AjaxJson.returnExceptionInfo("档号:" + archive_no + " 格式不对");
}
piece_no = folder_no_int.toString();
}
//修改后的档号
String substring = archive_no.substring(0, archive_no.lastIndexOf('-'));
String update_piece_no = maxPieceNoZeroFilling(Integer.parseInt(piece_no),archive_no.substring(archive_no.lastIndexOf('-')).length()-1);
String update_piece_no = maxPieceNoZeroFilling(Integer.parseInt(piece_no), archive_no.substring(archive_no.lastIndexOf('-')).length() - 1);
//最终档号名称
String update_archive_no = substring + "-" + update_piece_no;
@@ -1862,20 +1890,20 @@ public class DocSimpleService {
year_folder_no = (String) value;
Integer i = Integer.parseInt(year_folder_no);
i = i + offset;
if (i<=0){
return AjaxJson.returnExceptionInfo("案卷级档号:"+folder_no+" 格式不对");
if (i <= 0) {
return AjaxJson.returnExceptionInfo("案卷级档号:" + folder_no + " 格式不对");
}
year_folder_no = i.toString();
} else {
Integer folder_no_int = (Integer) value;
folder_no_int = folder_no_int + offset;
if (folder_no_int<=0){
return AjaxJson.returnExceptionInfo("案卷级档号:"+folder_no+" 格式不对");
if (folder_no_int <= 0) {
return AjaxJson.returnExceptionInfo("案卷级档号:" + folder_no + " 格式不对");
}
year_folder_no = folder_no_int.toString();
}
String substring = folder_no.substring(0, folder_no.lastIndexOf('-'));
String update_year_folder_no = maxPieceNoZeroFilling(Integer.parseInt(year_folder_no),folder_no.substring(folder_no.lastIndexOf('-')).length()-1);
String update_year_folder_no = maxPieceNoZeroFilling(Integer.parseInt(year_folder_no), folder_no.substring(folder_no.lastIndexOf('-')).length() - 1);
//最终案卷机档号
String update_archive_no = substring + "-" + update_year_folder_no;
@@ -1955,21 +1983,21 @@ public class DocSimpleService {
piece_no = (String) value;
Integer i = Integer.parseInt(piece_no);
i = i + offset;
if (i<=0){
return AjaxJson.returnExceptionInfo("档号:"+archive_no+" 格式不对");
if (i <= 0) {
return AjaxJson.returnExceptionInfo("档号:" + archive_no + " 格式不对");
}
piece_no = i.toString();
} else {
Integer folder_no_int = (Integer) value;
folder_no_int = folder_no_int + offset;
if (folder_no_int<=0){
return AjaxJson.returnExceptionInfo("档号:"+archive_no+" 格式不对");
if (folder_no_int <= 0) {
return AjaxJson.returnExceptionInfo("档号:" + archive_no + " 格式不对");
}
piece_no = folder_no_int.toString();
}
//修改后的档号
String substring = archive_no.substring(0, archive_no.lastIndexOf('-'));
String update_piece_no = maxPieceNoZeroFilling(Integer.parseInt(piece_no),archive_no.substring(archive_no.lastIndexOf('-')).length()-1);
String update_piece_no = maxPieceNoZeroFilling(Integer.parseInt(piece_no), archive_no.substring(archive_no.lastIndexOf('-')).length() - 1);
//最终档号名称
String update_archive_no = substring + "-" + update_piece_no;
//修改档号
@@ -2006,16 +2034,16 @@ public class DocSimpleService {
old_item_id = item_id;
Integer i = Integer.parseInt(item_id);
i = i + offset;
if (i<=0){
return AjaxJson.returnExceptionInfo("项目号:"+item_id+" 不对");
if (i <= 0) {
return AjaxJson.returnExceptionInfo("项目号:" + item_id + " 不对");
}
item_id = i.toString();
} else {
Integer folder_no_int = (Integer) value;
old_item_id = old_item_id;
folder_no_int = folder_no_int + offset;
if (folder_no_int<=0){
return AjaxJson.returnExceptionInfo("项目号:"+item_id+" 不对");
if (folder_no_int <= 0) {
return AjaxJson.returnExceptionInfo("项目号:" + item_id + " 不对");
}
item_id = folder_no_int.toString();
}
@@ -2145,20 +2173,20 @@ public class DocSimpleService {
year_folder_no = (String) value;
Integer i = Integer.parseInt(year_folder_no);
i = i + offset;
if (i<=0){
return AjaxJson.returnExceptionInfo("案卷级档号:"+folder_no+" 不对");
if (i <= 0) {
return AjaxJson.returnExceptionInfo("案卷级档号:" + folder_no + " 不对");
}
year_folder_no = i.toString();
} else {
Integer folder_no_int = (Integer) value;
folder_no_int = folder_no_int + offset;
if (folder_no_int<=0){
return AjaxJson.returnExceptionInfo("案卷级档号:"+folder_no+" 不对");
if (folder_no_int <= 0) {
return AjaxJson.returnExceptionInfo("案卷级档号:" + folder_no + " 不对");
}
year_folder_no = folder_no_int.toString();
}
String substring = folder_no.substring(0, folder_no.lastIndexOf('-'));
String update_year_folder_no = maxPieceNoZeroFilling(Integer.parseInt(year_folder_no),folder_no.substring(folder_no.lastIndexOf('-')).length()-1);
String update_year_folder_no = maxPieceNoZeroFilling(Integer.parseInt(year_folder_no), folder_no.substring(folder_no.lastIndexOf('-')).length() - 1);
//最终案卷机档号
String update_archive_no = substring + "-" + update_year_folder_no;
@@ -2239,21 +2267,21 @@ public class DocSimpleService {
piece_no = (String) value;
Integer i = Integer.parseInt(piece_no);
i = i + offset;
if (i<=0){
return AjaxJson.returnExceptionInfo("档号:"+archive_no+" 不对");
if (i <= 0) {
return AjaxJson.returnExceptionInfo("档号:" + archive_no + " 不对");
}
piece_no = i.toString();
} else {
Integer folder_no_int = (Integer) value;
folder_no_int = folder_no_int + offset;
if (folder_no_int<=0){
return AjaxJson.returnExceptionInfo("档号:"+archive_no+" 不对");
if (folder_no_int <= 0) {
return AjaxJson.returnExceptionInfo("档号:" + archive_no + " 不对");
}
piece_no = folder_no_int.toString();
}
//修改后的档号
String substring = archive_no.substring(0, archive_no.lastIndexOf('-'));
String update_piece_no = maxPieceNoZeroFilling(Integer.parseInt(piece_no),archive_no.substring(archive_no.lastIndexOf('-')).length()-1);
String update_piece_no = maxPieceNoZeroFilling(Integer.parseInt(piece_no), archive_no.substring(archive_no.lastIndexOf('-')).length() - 1);
//最终档号名称
String update_archive_no = substring + "-" + update_piece_no;
//修改档号
@@ -2281,18 +2309,18 @@ public class DocSimpleService {
* @param pieceNo
* @return 001, 002, 003
*/
public String maxPieceNoZeroFilling(Integer pieceNo,Integer len) {
public String maxPieceNoZeroFilling(Integer pieceNo, Integer len) {
//根据原本的长度来设置件号
String result = "";
if (len==1){
result =""+pieceNo;
}else if (len==2){
if (pieceNo<10){
result="0"+pieceNo;
}else {
result=""+pieceNo;
if (len == 1) {
result = "" + pieceNo;
} else if (len == 2) {
if (pieceNo < 10) {
result = "0" + pieceNo;
} else {
result = "" + pieceNo;
}
}else if (len==3){
} else if (len == 3) {
if (pieceNo < 10) {
result = "00" + pieceNo;
} else if (pieceNo < 100 && pieceNo >= 10) {
@@ -2300,25 +2328,25 @@ public class DocSimpleService {
} else if (pieceNo < 1000 && pieceNo >= 100) {
result = "" + pieceNo;
}
}else if (len==4){
} else if (len == 4) {
if (pieceNo < 10) {
result = "000" + pieceNo;
} else if (pieceNo < 100 && pieceNo >= 10) {
result = "00" + pieceNo;
} else if (pieceNo < 1000 && pieceNo >= 100) {
result = "0" + pieceNo;
}else {
result =""+pieceNo;
} else {
result = "" + pieceNo;
}
}else if (len==5){
} else if (len == 5) {
if (pieceNo < 10) {
result = "0000" + pieceNo;
} else if (pieceNo < 100 && pieceNo >= 10) {
result = "000" + pieceNo;
} else if (pieceNo < 1000 && pieceNo >= 100) {
result = "00" + pieceNo;
}else if (pieceNo < 10000 && pieceNo >= 1000){
result ="0"+pieceNo;
} else if (pieceNo < 10000 && pieceNo >= 1000) {
result = "0" + pieceNo;
}
}
// if (pieceNo < 10) {
@@ -2339,24 +2367,24 @@ public class DocSimpleService {
Set<Map.Entry> set = conditionMap.entrySet();
//获取redis里面的所有值
// List<Object> list = redisUtil.lGet("list", 0, -1);
Cache<Object,Object> fiveSecondCache = guavaLocalCache.getFiveSecondCache();
Cache<Object, Object> fiveSecondCache = guavaLocalCache.getFiveSecondCache();
List<Object> list = (List<Object>) fiveSecondCache.getIfPresent("list");
List<Object> resultList = new ArrayList<>();
if(conditionMap.isEmpty()){
if (conditionMap.isEmpty()) {
resultList = list;
}else{
if(CollectionUtils.isNotEmpty(list)){
} else {
if (CollectionUtils.isNotEmpty(list)) {
for (Object o : list) {
Map<String,Object> map = (Map<String, Object>) o;
Map<String, Object> map = (Map<String, Object>) o;
int i = 0;
for (Map.Entry entry : set) {
String key = entry.getKey().toString();
String value = entry.getValue().toString();
if(map.get(key).toString().contains(value)){
if (map.get(key).toString().contains(value)) {
i++;
}
}
if(i==set.size()){
if (i == set.size()) {
resultList.add(map);
}
}
@@ -2365,17 +2393,15 @@ public class DocSimpleService {
}
List list1 = PageUtil.startPage(resultList, packSqlObject.getPage(), packSqlObject.getLimit());
json.put("list",list1);
json.put("total",resultList.size());
json.put("list", list1);
json.put("total", resultList.size());
return json;
}
//向上移
public AjaxJson moveUp(
Integer fileId, String funcTypeCode,String tableName
) {
public AjaxJson moveUp(Integer fileId, String funcTypeCode, String tableName) {
AjaxJson json = null;
try {
tableName = tableName + "_" + funcTypeCode;
@@ -2389,15 +2415,15 @@ public class DocSimpleService {
json = AjaxJson.returnExceptionInfo("已是最前了");
return json;
}
Integer recId = (Integer)map.get("rec_id");
Integer recId = (Integer) map.get("rec_id");
String sql1 = "select * from " + tableName + " where rec_id = " + recId + " and page_no < " + pageNo + " order by page_no desc limit 1";
// ArchiveFile archiveFileUpOne = archiveFileService.getArchiveFileUpOne(parasMap);
List<Map<String, Object>> list1 = directorySeqMapper.executeSqlList(sql1);
Map<String, Object> map1 = list1.get(0);
Integer pageNo1 = (Integer)map1.get("page_no");
Integer id = (Integer)map1.get("id");
Integer pageNo1 = (Integer) map1.get("page_no");
Integer id = (Integer) map1.get("id");
//1.把数据库和图片都上移动一格(当前选中的一条图片)
if (true) {
HashMap maps = new HashMap();
@@ -2425,9 +2451,7 @@ public class DocSimpleService {
//向下移
public AjaxJson moveDown(
Integer fileId, String funcTypeCode,String tableName
) {
public AjaxJson moveDown(Integer fileId, String funcTypeCode, String tableName) {
AjaxJson json = null;
try {
tableName = tableName + "_" + funcTypeCode;
@@ -2437,7 +2461,7 @@ public class DocSimpleService {
// ArchiveFile archiveFile = archiveFileService.selectByPrimaryKey(fileId);
Map<String, Object> map = list.get(0);
Integer pageNo = (Integer) map.get("page_no");
Integer recId = (Integer)map.get("rec_id");
Integer recId = (Integer) map.get("rec_id");
String sql2 = "select count(*) as num from " + tableName + " where rec_id = " + recId;
List<Map<String, Object>> list2 = directorySeqMapper.executeSqlList(sql2);
Map<String, Object> map2 = list2.get(0);
@@ -2453,8 +2477,8 @@ public class DocSimpleService {
// ArchiveFile archiveFileUpOne = archiveFileService.getArchiveFileUpOne(parasMap);
List<Map<String, Object>> list1 = directorySeqMapper.executeSqlList(sql1);
Map<String, Object> map1 = list1.get(0);
Integer pageNo1 = (Integer)map1.get("page_no");
Integer id = (Integer)map1.get("id");
Integer pageNo1 = (Integer) map1.get("page_no");
Integer id = (Integer) map1.get("id");
//1.把数据库和图片都上移动一格(当前选中的一条图片)
if (true) {
HashMap maps = new HashMap();
@@ -2481,17 +2505,14 @@ public class DocSimpleService {
}
//重命名
public AjaxJson rename(
Integer fileId, String funcTypeCode,String tableName,String name
) {
public AjaxJson rename(Integer fileId, String funcTypeCode, String tableName, String name) {
AjaxJson json = null;
try {
tableName = tableName + "_" + funcTypeCode;
HashMap maps = new HashMap();
maps.put("tableName", tableName);
maps.put("fieldValue", " file_name = " + "'" +name+"'");
maps.put("fieldValue", " file_name = " + "'" + name + "'");
maps.put("conditionSql", " id = " + fileId);
docSimpleMapper.updateObject(maps);
@@ -2575,7 +2596,7 @@ public class DocSimpleService {
if (CollectionUtils.isNotEmpty(archives)) {
for (Map<String, Object> archive : archives) {
int filingYear = 0;
if (archive.get("filing_year") != null&&ToolSelf.isNumeric(archive.get("filing_year").toString())) {
if (archive.get("filing_year") != null && ToolSelf.isNumeric(archive.get("filing_year").toString())) {
filingYear = Integer.parseInt(archive.get("filing_year").toString());
}
String retention = "";

View File

@@ -37,19 +37,113 @@ import org.elasticsearch.search.sort.SortOrder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.stereotype.Service;
import lombok.extern.slf4j.Slf4j;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
@Slf4j
@Service
@EnableAsync
public class ElasticArchiveDao {
public static RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder();
/**
 * Converts a camelCase identifier into snake_case.
 * Example: userName -> user_name. Null and empty inputs are returned unchanged.
 */
private String camelToSnake(String camelCase) {
    if (camelCase == null || camelCase.isEmpty()) {
        return camelCase;
    }
    char[] chars = camelCase.toCharArray();
    StringBuilder result = new StringBuilder(chars.length + 4);
    // The first character is always lowered, never prefixed with an underscore.
    result.append(Character.toLowerCase(chars[0]));
    for (int idx = 1; idx < chars.length; idx++) {
        char current = chars[idx];
        if (Character.isUpperCase(current)) {
            result.append('_').append(Character.toLowerCase(current));
        } else {
            result.append(current);
        }
    }
    return result.toString();
}
/**
 * Converts a snake_case identifier into camelCase.
 * Example: user_name -> userName. Underscores are dropped and the character
 * following each underscore is upper-cased; all other characters are lowered.
 * Null and empty inputs are returned unchanged.
 */
private String snakeToCamel(String snakeCase) {
    if (snakeCase == null || snakeCase.isEmpty()) {
        return snakeCase;
    }
    StringBuilder result = new StringBuilder(snakeCase.length());
    boolean upperPending = false;
    for (char current : snakeCase.toCharArray()) {
        if (current == '_') {
            // Consume the separator; the next non-underscore char is upper-cased.
            upperPending = true;
            continue;
        }
        result.append(upperPending ? Character.toUpperCase(current) : Character.toLowerCase(current));
        upperPending = false;
    }
    return result.toString();
}
/**
 * Checks whether two field names refer to the same field, tolerating
 * camelCase vs snake_case differences.
 *
 * @param field1 first field name (may be null)
 * @param field2 second field name (may be null)
 * @return true when the names match directly, after converting both to
 *         snake_case, after converting both to camelCase, or when either
 *         converted form matches the other name verbatim
 */
private boolean isFieldEqual(String field1, String field2) {
    if (field1 == null || field2 == null) {
        // Equal only when both are null.
        return field1 == field2;
    }
    if (field1.equals(field2)) {
        return true;
    }
    String snake1 = camelToSnake(field1);
    String snake2 = camelToSnake(field2);
    String camel1 = snakeToCamel(field1);
    String camel2 = snakeToCamel(field2);
    // Normalized comparisons, then cross-comparisons of each converted
    // form against the other side's original value.
    return snake1.equals(snake2)
            || camel1.equals(camel2)
            || snake1.equals(field2) || snake2.equals(field1)
            || camel1.equals(field2) || camel2.equals(field1);
}
// 初始化api客户端
/* public static RestHighLevelClient client = new RestHighLevelClient(
RestClient.builder(new HttpHost("localhost", 9200, "http")));*/
@@ -128,25 +222,7 @@ public class ElasticArchiveDao {
}
// 关键字搜索 指定匹配类型
public AjaxJson searchProductList(String indexName,
String type,
String fieldName,
String keyword,
String fieldName2,
String keyword2,
String fieldName3,
String keyword3,
String fieldName4,
String keyword4,
String fieldName5,
String keyword5,
String fieldName6,
String keyword6,
Boolean bool,
boolean ordinaryRole,
String entityIds,
Integer page,
Integer limit) throws Exception {
public AjaxJson searchProductList(String indexName, String type, String fieldName, String keyword, String fieldName2, String keyword2, String fieldName3, String keyword3, String fieldName4, String keyword4, String fieldName5, String keyword5, String fieldName6, String keyword6, Boolean bool, boolean ordinaryRole, String entityIds, Integer page, Integer limit) throws Exception {
AjaxJson ajaxJson = new AjaxJson();
SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().trackTotalHits(true);
@@ -238,9 +314,69 @@ public class ElasticArchiveDao {
SearchResponse searchResponse = client.search(searchRequest, options.build());
SearchHits hits = searchResponse.getHits();
List<Map> json = new ArrayList();
for (SearchHit hit : hits.getHits()) {
json.add(hit.getSourceAsMap());
OpenControlExample example0 = new OpenControlExample();
example0.createCriteria().andOpenTypeEqualTo(1);
List<OpenControl> openControls = openControlMapper.selectByExample(example0);
// 记录开放控制规则数量
log.info("应用开放控制规则数量: {}", openControls.size());
// 打印所有开放控制规则详情
for (OpenControl oc : openControls) {
log.info("开放控制规则 - ID: {}, 字段: {}, 值: {}",
oc.getId(), oc.getFieldEg(), oc.getValue());
}
int processedHits = 0;
int hiddenFieldsCount = 0;
for (SearchHit hit : hits.getHits()) {
Map<String, Object> res = new HashMap<>();
String hitId = hit.getId();
Map<String, Object> sourceMap = hit.getSourceAsMap();
log.info("处理文档ID: {}, 文档内容: {}", hitId, sourceMap);
AtomicInteger currentHitHiddenFields = new AtomicInteger();
sourceMap.forEach((key, value) -> {
boolean fieldHidden = false;
for (OpenControl openControl : openControls) {
String controlField = openControl.getFieldEg();
String controlValue = openControl.getValue();
log.debug("检查字段 - 文档ID: {}, 字段名: {}, 字段值: {}, 控制字段: {}, 控制值: {}",
hitId, key, value, controlField, controlValue);
// 使用字段名相等性检查(支持驼峰和蛇形命名转换)
if (isFieldEqual(key, controlField) && value != null && value.equals(controlValue)) {
res.put(key, "");
fieldHidden = true;
currentHitHiddenFields.getAndIncrement();
// 记录字段被隐藏的详细信息
log.info("字段隐藏处理 - 文档ID: {}, 字段名: {}, 字段值: {}, 控制规则ID: {}, 控制字段: {}, 控制值: {}",
hitId, key, value, openControl.getId(), controlField, controlValue);
break;
}
}
if (!fieldHidden) {
res.put(key, value);
}
});
if (currentHitHiddenFields.get() > 0) {
log.info("文档ID: {} 共隐藏了 {} 个字段", hitId, currentHitHiddenFields);
res.put("controlIdentifier", "隐藏");
}
json.add(res);
processedHits++;
}
log.info("开放控制处理完成 - 处理文档数: {}, 总隐藏字段数: {}", processedHits, hiddenFieldsCount);
ajaxJson.put("json", json);
ajaxJson.put("total", searchResponse.getHits().getTotalHits().value);
@@ -275,22 +411,22 @@ public class ElasticArchiveDao {
}
public AjaxJson advancedSearch(Map<String,Object> param){
public AjaxJson advancedSearch(Map<String, Object> param) {
AjaxJson json = new AjaxJson();
SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().trackTotalHits(true);
BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
try {
String indexName = StringUtil.formatMap(param,"indexName");
String type = StringUtil.formatMap(param,"type");
Integer page = Integer.parseInt(StringUtil.formatMap(param,"page"));
Integer limit = Integer.parseInt(StringUtil.formatMap(param,"limit"));
List<Map<String,Object>> conditions = (List<Map<String,Object>>) param.get("conditions");
if(CollectionUtils.isNotEmpty(conditions)){
for (Map<String,Object> item : conditions) {
String relation = StringUtil.formatMap(item,"relation");
String searchCondition = StringUtil.formatMap(item,"condition");
String fetchName =StringUtil.formatMap(item,"tagName");
String tagValue =StringUtil.formatMap(item,"tagValue");
String indexName = StringUtil.formatMap(param, "indexName");
String type = StringUtil.formatMap(param, "type");
Integer page = Integer.parseInt(StringUtil.formatMap(param, "page"));
Integer limit = Integer.parseInt(StringUtil.formatMap(param, "limit"));
List<Map<String, Object>> conditions = (List<Map<String, Object>>) param.get("conditions");
if (CollectionUtils.isNotEmpty(conditions)) {
for (Map<String, Object> item : conditions) {
String relation = StringUtil.formatMap(item, "relation");
String searchCondition = StringUtil.formatMap(item, "condition");
String fetchName = StringUtil.formatMap(item, "tagName");
String tagValue = StringUtil.formatMap(item, "tagValue");
BoolQueryBuilder currentConditionQuery = QueryBuilders.boolQuery();
switch (searchCondition) {
case "equal":
@@ -322,7 +458,7 @@ public class ElasticArchiveDao {
// 根据 relation 的值决定如何添加当前条件到主查询构建器中
if ("or".equalsIgnoreCase(relation)) {
boolQueryBuilder.should(currentConditionQuery);
}else{
} else {
boolQueryBuilder.must(currentConditionQuery);
}
@@ -353,7 +489,7 @@ public class ElasticArchiveDao {
}
json.put("json", data);
json.put("total", searchResponse.getHits().getTotalHits().value);
}else {
} else {
json = AjaxJson.returnExceptionInfo("请输出查询条件");
}
} catch (Exception e) {
@@ -369,10 +505,7 @@ public class ElasticArchiveDao {
// 关键字搜索 指定匹配类型
public AjaxJson searchAll(String indexName,
String type,
Integer page,
Integer limit) throws IOException {
public AjaxJson searchAll(String indexName, String type, Integer page, Integer limit) throws IOException {
AjaxJson ajaxJson = new AjaxJson();
SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().trackTotalHits(true);
@@ -597,10 +730,7 @@ public class ElasticArchiveDao {
//openType:0-开放1-不开放
public String selectOpenControlByEntity(Integer entityId,
Integer type,
String archiveNo,
String tableName) {
public String selectOpenControlByEntity(Integer entityId, Integer type, String archiveNo, String tableName) {
String result = "隐藏";
String conditionSql = "";
if (type == 1) {
@@ -608,6 +738,10 @@ public class ElasticArchiveDao {
} else {
conditionSql = " 1=1 and folder_no in ('" + archiveNo + "') ";
}
log.info("开放控制检查开始 - entityId: {}, type: {}, archiveNo: {}, tableName: {}, conditionSql: {}",
entityId, type, archiveNo, tableName, conditionSql);
//得到数据列表
Map<String, Object> parasMap = new HashMap<String, Object>();
parasMap.put("tableName", tableName);
@@ -617,22 +751,66 @@ public class ElasticArchiveDao {
//开放
OpenControlExample example0 = new OpenControlExample();
example0.setOrderByClause("sort");
example0.createCriteria()
.andEntityIdEqualTo(entityId)
.andOpenTypeEqualTo(0);
example0.createCriteria().andEntityIdEqualTo(entityId).andOpenTypeEqualTo(0);
List<OpenControl> openControlList0 = openControlMapper.selectByExample(example0);
log.info("查询到数据记录数: {}, 开放控制规则数: {}", dataList.size(), openControlList0.size());
// 打印所有数据记录的详细信息
for (int i = 0; i < dataList.size(); i++) {
Map<String, Object> map = dataList.get(i);
log.info("数据记录[{}]: {}", i, map);
}
// 打印所有开放控制规则
for (OpenControl oc : openControlList0) {
log.info("开放控制规则 - ID: {}, 字段: {}, 值: {}",
oc.getId(), oc.getFieldEg(), oc.getValue());
}
for (Map<String, Object> map : dataList) {
log.debug("检查数据记录: {}", map);
for (OpenControl openControl : openControlList0) {
if (map.get(openControl.getFieldEg()).equals(openControl.getValue())) {
String controlField = openControl.getFieldEg();
String controlValue = openControl.getValue();
// 遍历数据记录的所有字段,查找匹配的控制字段
boolean fieldMatched = false;
Object matchedFieldValue = null;
String matchedFieldName = null;
for (Map.Entry<String, Object> entry : map.entrySet()) {
String dataField = entry.getKey();
Object dataValue = entry.getValue();
// 使用字段名相等性检查(支持驼峰和蛇形命名转换)
if (isFieldEqual(dataField, controlField)) {
fieldMatched = true;
matchedFieldValue = dataValue;
matchedFieldName = dataField;
break;
}
}
log.debug("比较字段 - 控制字段: {}, 匹配的数据字段: {}, 字段值: {}, 控制值: {}, 规则ID: {}",
controlField, matchedFieldName, matchedFieldValue, controlValue, openControl.getId());
if (fieldMatched && matchedFieldValue != null && matchedFieldValue.equals(controlValue)) {
result = "开放";
log.info("开放控制匹配成功 - 数据记录: {}, 控制字段: {}, 匹配字段: {}, 字段值: {}, 控制值: {}, 规则ID: {}",
map, controlField, matchedFieldName, matchedFieldValue, controlValue, openControl.getId());
break;
}
}
if ("开放".equals(result)) {
break;
}
}
log.info("开放控制检查结果: {} (entityId: {}, archiveNo: {})", result, entityId, archiveNo);
return result;
}
@@ -640,12 +818,7 @@ public class ElasticArchiveDao {
try {
String indexName0 = "xinchuang";
String type0 = "dangan";
AjaxJson json = searchAll(
indexName0,
type0,
1,
2000000
);
AjaxJson json = searchAll(indexName0, type0, 1, 2000000);
List<Map> list = (List<Map>) json.getBody().get("json");
for (Map map : list) {
@@ -655,12 +828,7 @@ public class ElasticArchiveDao {
String archiveNo = StringUtil.formatMap(map, "archiveNo");
String tableName = StringUtil.formatMap(map, "tableName");
String kaifang = selectOpenControlByEntity(
entityId,
type,
archiveNo,
tableName
);
String kaifang = selectOpenControlByEntity(entityId, type, archiveNo, tableName);
MyArchives product = new MyArchives();
product.setOpen(kaifang);

View File

@@ -7,6 +7,8 @@ import com.bstek.ureport.Utils;
import com.bstek.ureport.export.ExportConfigure;
import com.bstek.ureport.export.ExportConfigureImpl;
import com.bstek.ureport.export.ExportManager;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.point.strategy.archiveNoSet.bean.ArchiveNoFormat;
@@ -53,13 +55,13 @@ import javax.servlet.http.HttpServletRequest;
public class FourCheckService {
@Autowired
private FourCheckMapper fourCheckMapper;
@Autowired
FourCheckSettingMapper fourCheckSettingMapper;
@Autowired
FourCheckStandardMapper fourCheckStandardMapper;
@Autowired
TtableDescriptionMapper ttableDescriptionMapper;
@@ -80,9 +82,35 @@ public class FourCheckService {
@Autowired
MetadataStandardMapper metadataStandardMapper;
private ObjectMapper objectMapper = new ObjectMapper();
private static final Logger logger = LoggerFactory.getLogger(FourCheckService.class);
/**
 * Soft-wraps an over-long string so no produced line exceeds maxCols
 * characters, preventing a single oversized row from breaking PDF pagination.
 * Existing line breaks (\r\n, \r, \n) are preserved as line boundaries.
 *
 * @param text    input text; null yields an empty list
 * @param maxCols maximum characters per output line (callers pass a positive value)
 * @return the wrapped lines, in original order
 */
private static List<String> softWrap(String text, int maxCols) {
    List<String> wrapped = new ArrayList<>();
    if (text == null) {
        return wrapped;
    }
    // Normalize all newline conventions to '\n' before splitting; the -1
    // limit keeps trailing empty lines.
    String normalized = text.replace("\r\n", "\n").replace('\r', '\n');
    for (String line : normalized.split("\n", -1)) {
        if (line.length() <= maxCols) {
            wrapped.add(line);
            continue;
        }
        // Chunk an over-long line into fixed-width slices.
        for (int from = 0; from < line.length(); from += maxCols) {
            wrapped.add(line.substring(from, Math.min(from + maxCols, line.length())));
        }
    }
    return wrapped;
}
@Transactional
public AjaxJson<Object> addEntity(FourCheck entity) {
AjaxJson<Object> result = null;
@@ -210,11 +238,55 @@ public class FourCheckService {
String pass = "通过";
if ("0".equals((new StringBuilder()).append(map1.get("pass")).append("").toString()))
pass = "不通过";
map1.put("pass", pass);
map1.put("checkLink", checkLink);
map1.put("dataFormat", dataFormat);
map1.put("filePath", fourCheck.getFilePath());
returnList.add(map1);
Object msgObj = map1.get("msg");
// 将可能很长的列表消息拆分为多行/多条记录,避免单行过高导致 iText 无限循环异常
List<String> msgLines = new ArrayList<>();
if (msgObj instanceof List) {
for (Object o : (List<?>) msgObj) {
// 对每条内容进行软换行展开
msgLines.addAll(softWrap(String.valueOf(o), 60));
}
} else if (msgObj instanceof com.alibaba.fastjson.JSONArray) {
com.alibaba.fastjson.JSONArray arr = (com.alibaba.fastjson.JSONArray) msgObj;
for (Object o : arr) {
msgLines.addAll(softWrap(String.valueOf(o), 60));
}
} else if (msgObj != null) {
msgLines.addAll(softWrap(String.valueOf(msgObj), 60));
}
// 分块,每块最多 N 行,避免某一行高度超过页面
final int CHUNK = 15;
if (msgLines.isEmpty()) {
Map<String, Object> row = new HashMap<>();
row.putAll(map1);
row.put("type", typeChn);
row.put("pass", pass);
row.put("checkLink", checkLink);
row.put("dataFormat", dataFormat);
row.put("filePath", fourCheck.getFilePath());
row.put("createdDate", dateFormat.format(fourCheck.getCreatedDate()));
row.put("userChnName", fourCheck.getCreatedBy());
row.put("msg", "");
returnList.add(row);
} else {
for (int i = 0; i < msgLines.size(); i += CHUNK) {
int end = Math.min(i + CHUNK, msgLines.size());
String chunkText = String.join("\n", msgLines.subList(i, end));
Map<String, Object> row = new HashMap<>();
row.putAll(map1);
row.put("type", typeChn);
row.put("pass", pass);
row.put("checkLink", checkLink);
row.put("dataFormat", dataFormat);
row.put("filePath", fourCheck.getFilePath());
row.put("createdDate", dateFormat.format(fourCheck.getCreatedDate()));
row.put("userChnName", fourCheck.getCreatedBy());
row.put("msg", chunkText);
returnList.add(row);
}
}
}
// Collections.sort(returnList, (Comparator<? super Map<String, Object>>)new Object());
// String pathName = "E:/zzrsda/point-strategy/src/main/webapp/pdffile";
@@ -236,13 +308,18 @@ public class FourCheckService {
try {
SimpleDateFormat df = new SimpleDateFormat("yyyyMMddHHmmssSSS");
String currentName = "fourcheck_" + df.format(new Date());
// 确保输出目录存在,避免因目录缺失导致 FileNotFoundException
File outDir = new File(pathName);
if (!outDir.exists()) {
outDir.mkdirs();
}
outputStream = new FileOutputStream(new File(pathName + File.separator + currentName + ".pdf"));
ExportConfigureImpl exportConfigureImpl = new ExportConfigureImpl("file:四性检测报表.ureport.xml", map, outputStream);
ExportManager exportManager = (ExportManager) Utils.getApplicationContext().getBean("ureport.exportManager");
exportManager.exportPdf((ExportConfigure)exportConfigureImpl);
return currentName;
} catch (Exception e) {
logger.error("导出四性检测报表PDF失败(findResultById-默认路径)", e);
}
return null;
}
@@ -303,11 +380,48 @@ public class FourCheckService {
String pass = "通过";
if ("0".equals((new StringBuilder()).append(map1.get("pass")).append("").toString()))
pass = "不通过";
map1.put("pass", pass);
map1.put("checkLink", checkLink);
map1.put("dataFormat", dataFormat);
map1.put("filePath", fourCheck.getFilePath());
returnList.add(map1);
Object msgObj = map1.get("msg");
List<String> msgLines = new ArrayList<>();
if (msgObj instanceof List) {
for (Object o : (List<?>) msgObj) msgLines.addAll(softWrap(String.valueOf(o), 60));
} else if (msgObj instanceof com.alibaba.fastjson.JSONArray) {
com.alibaba.fastjson.JSONArray arr = (com.alibaba.fastjson.JSONArray) msgObj;
for (Object o : arr) msgLines.addAll(softWrap(String.valueOf(o), 60));
} else if (msgObj != null) {
msgLines.addAll(softWrap(String.valueOf(msgObj), 60));
}
final int CHUNK = 15;
if (msgLines.isEmpty()) {
Map<String, Object> row = new HashMap<>();
row.putAll(map1);
row.put("type", typeChn);
row.put("pass", pass);
row.put("checkLink", checkLink);
row.put("dataFormat", dataFormat);
row.put("filePath", fourCheck.getFilePath());
row.put("createdDate", dateFormat.format(fourCheck.getCreatedDate()));
row.put("userChnName", fourCheck.getCreatedBy());
row.put("msg", "");
returnList.add(row);
} else {
for (int i = 0; i < msgLines.size(); i += CHUNK) {
int end = Math.min(i + CHUNK, msgLines.size());
String chunkText = String.join("\n", msgLines.subList(i, end));
Map<String, Object> row = new HashMap<>();
row.putAll(map1);
row.put("type", typeChn);
row.put("pass", pass);
row.put("checkLink", checkLink);
row.put("dataFormat", dataFormat);
row.put("filePath", fourCheck.getFilePath());
row.put("createdDate", dateFormat.format(fourCheck.getCreatedDate()));
row.put("userChnName", fourCheck.getCreatedBy());
row.put("msg", chunkText);
returnList.add(row);
}
}
}
// Collections.sort(returnList, (Comparator<? super Map<String, Object>>)new Object());
// String pathName = "E:/zzrsda/point-strategy/src/main/webapp/pdffile";
@@ -323,13 +437,18 @@ public class FourCheckService {
try {
SimpleDateFormat df = new SimpleDateFormat("yyyyMMddHHmmssSSS");
String currentName = "fourcheck_" + df.format(new Date());
// 确保输出目录存在
File outDir = new File(filePath);
if (!outDir.exists()) {
outDir.mkdirs();
}
outputStream = new FileOutputStream(new File(filePath + File.separator + currentName + ".pdf"));
ExportConfigureImpl exportConfigureImpl = new ExportConfigureImpl("file:四性检测报表.ureport.xml", map, outputStream);
ExportManager exportManager = (ExportManager) Utils.getApplicationContext().getBean("ureport.exportManager");
exportManager.exportPdf((ExportConfigure)exportConfigureImpl);
return currentName;
} catch (Exception e) {
logger.error("导出四性检测报表PDF失败(findResultById-指定路径)", e);
}
return null;
}
@@ -575,7 +694,11 @@ public class FourCheckService {
map.put("link",checkItem);
map.put("describe",testItems);
if(CollectionUtils.isNotEmpty(archivesNosTwo)){
map.put("msg",archivesNosTwo);
try {
map.put("msg",objectMapper.writeValueAsString(archivesNosTwo));
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}else{
map.put("msg","");
}
@@ -588,7 +711,11 @@ public class FourCheckService {
map.put("link",checkItem);
map.put("describe",testItems);
if(CollectionUtils.isNotEmpty(archivesNosThree)){
map.put("msg",archivesNosThree);
try {
map.put("msg",objectMapper.writeValueAsString(archivesNosThree));
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}else{
map.put("msg","");
}
@@ -600,7 +727,11 @@ public class FourCheckService {
map.put("link",checkItem);
map.put("describe",testItems);
if(CollectionUtils.isNotEmpty(archivesNosFour)){
map.put("msg",archivesNosFour);
try {
map.put("msg",objectMapper.writeValueAsString(archivesNosFour));
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}else{
map.put("msg","");
}
@@ -612,7 +743,11 @@ public class FourCheckService {
map.put("link",checkItem);
map.put("describe",testItems);
if(CollectionUtils.isNotEmpty(archivesNos)){
map.put("msg",archivesNos);
try {
map.put("msg",objectMapper.writeValueAsString(archivesNos));
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}else{
map.put("msg","");
}
@@ -626,7 +761,11 @@ public class FourCheckService {
map.put("link",checkItem);
map.put("describe",testItems);
if(CollectionUtils.isNotEmpty(archivesNosOne)){
map.put("msg",archivesNosOne);
try {
map.put("msg",objectMapper.writeValueAsString(archivesNosOne));
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}else{
map.put("msg","");
}
@@ -638,7 +777,11 @@ public class FourCheckService {
map.put("link",checkItem);
map.put("describe",testItems);
if(!StringUtils.isEmpty(archivesNosFive)){
map.put("msg",archivesNosFive);
try {
map.put("msg",objectMapper.writeValueAsString(archivesNosFive));
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}else{
map.put("msg","");
}
@@ -1305,4 +1448,4 @@ public class FourCheckService {
return result;
}
}
}

View File

@@ -0,0 +1,83 @@
package com.point.strategy.oaDocking.service;
import com.point.strategy.common.SystemProperty;
import java.sql.*;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Thin JDBC bridge to the city business system database.
 * Connection settings are read once, at class load, from
 * application.properties via the project's SystemProperty helper.
 */
public class CityBusinessSystemIntegration {

    private static final Logger LOGGER = Logger.getLogger(CityBusinessSystemIntegration.class.getName());

    private static final String driver = SystemProperty.getKeyValue("cityBusiness.driverClassName", "application.properties");
    private static final String url = SystemProperty.getKeyValue("cityBusiness.url", "application.properties");
    private static final String user = SystemProperty.getKeyValue("cityBusiness.username", "application.properties");
    private static final String password = SystemProperty.getKeyValue("cityBusiness.password", "application.properties");

    /**
     * Opens a new database connection, logging driver load and connect timing.
     *
     * @return an open JDBC connection; the caller is responsible for closing it
     * @throws ClassNotFoundException when the configured driver class is missing
     * @throws SQLException           when the connection attempt fails
     */
    private static Connection getConnection() throws Exception {
        try {
            LOGGER.log(Level.INFO, "正在连接数据库: " + url);
            LOGGER.log(Level.INFO, "使用用户: " + user);
            LOGGER.log(Level.INFO, "加载驱动: " + driver);
            Class.forName(driver);
            LOGGER.log(Level.INFO, "驱动加载成功,开始建立连接...");
            long startTime = System.currentTimeMillis();
            Connection conn = DriverManager.getConnection(url, user, password);
            long endTime = System.currentTimeMillis();
            LOGGER.log(Level.INFO, "数据库连接成功,耗时: " + (endTime - startTime) + "ms");
            return conn;
        } catch (ClassNotFoundException e) {
            LOGGER.log(Level.SEVERE, "数据库驱动未找到: " + driver, e);
            throw e;
        } catch (SQLException e) {
            LOGGER.log(Level.SEVERE, "数据库连接失败. URL: " + url + ", User: " + user + ", SQLState: " + e.getSQLState() + ", ErrorCode: " + e.getErrorCode(), e);
            throw e;
        }
    }

    /**
     * Executes a query and returns the live ResultSet.
     * The Statement is registered with closeOnCompletion(), so closing the
     * returned ResultSet releases the Statement as well.
     *
     * NOTE(review): the underlying Connection is never closed on the success
     * path and the caller has no handle to it — this leaks a connection per
     * query; confirm whether callers pass it back through closeResources().
     *
     * @param sql SQL text to execute (caller-built; not parameterized here)
     * @return the query's ResultSet; caller must close it
     * @throws Exception on connection or execution failure (resources are
     *                   released before rethrowing)
     */
    public static ResultSet executeQuery(String sql) throws Exception {
        Connection conn = null;
        Statement st = null;
        try {
            conn = getConnection();
            st = conn.createStatement();
            // Close the Statement automatically once its ResultSet is closed.
            st.closeOnCompletion();
            return st.executeQuery(sql);
        } catch (Exception e) {
            LOGGER.log(Level.SEVERE, "执行查询时出错: " + sql, e);
            closeResources(conn, st, null);
            throw e;
        }
    }

    /**
     * Executes an INSERT/UPDATE/DELETE statement.
     *
     * @param sql SQL text to execute
     * @return the update count reported by the driver
     * @throws Exception on connection or execution failure; resources are
     *                   always released in the finally block
     */
    public static int executeUpdate(String sql) throws Exception {
        Connection conn = null;
        Statement st = null;
        try {
            conn = getConnection();
            st = conn.createStatement();
            return st.executeUpdate(sql);
        } catch (Exception e) {
            LOGGER.log(Level.SEVERE, "执行更新时出错: " + sql, e);
            throw e;
        } finally {
            closeResources(conn, st, null);
        }
    }

    /**
     * Best-effort close of JDBC resources in ResultSet -> Statement ->
     * Connection order. Each resource is closed independently so a failure
     * closing one does not leak the others (the original single try/catch
     * would abandon the Statement and Connection if rs.close() threw).
     */
    public static void closeResources(Connection conn, Statement st, ResultSet rs) {
        if (rs != null) {
            try {
                rs.close();
            } catch (Exception e) {
                LOGGER.log(Level.WARNING, "关闭资源时出错", e);
            }
        }
        if (st != null) {
            try {
                st.close();
            } catch (Exception e) {
                LOGGER.log(Level.WARNING, "关闭资源时出错", e);
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (Exception e) {
                LOGGER.log(Level.WARNING, "关闭资源时出错", e);
            }
        }
    }
}

View File

@@ -1,6 +1,5 @@
package com.point.strategy.oaDocking.util;
import cn.hutool.core.codec.Base64;
import cn.hutool.core.io.FileTypeUtil;
import cn.hutool.core.io.FileUtil;
import com.point.strategy.common.StringUtil;
@@ -10,10 +9,8 @@ import org.apache.tools.zip.ZipFile;
import org.springframework.web.multipart.MultipartFile;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import sun.misc.BASE64Decoder;
import sun.misc.BASE64Encoder;
import java.io.*;
import java.util.Base64;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
@@ -179,7 +176,7 @@ public class FileUtils {
if(base64==null||"".equals(base64)) {
return null;
}
byte[] buff= Base64.decode(base64);
byte[] buff= java.util.Base64.getDecoder().decode(base64);
File file=null;
FileOutputStream fout=null;
try {
@@ -216,10 +213,8 @@ public class FileUtils {
} catch (IOException e) {
e.printStackTrace();
}
// 对字节数组Base64编码
BASE64Encoder encoder = new BASE64Encoder();
// 返回Base64编码过的字节数组字符串
return encoder.encode(data);
// 使用Java 8+的Base64编码
return Base64.getEncoder().encodeToString(data);
}
/**
@@ -232,16 +227,9 @@ public class FileUtils {
// 文件字节数组字符串数据为空
if (imgStr == null)
return false;
BASE64Decoder decoder = new BASE64Decoder();
try {
// Base64解码
byte[] b = decoder.decodeBuffer(imgStr);
for (int i = 0; i < b.length; ++i) {
{// 调整异常数据
if (b[i] < 0)
b[i] += 256;
}
}
// 使用Java 8+的Base64解码
byte[] b = Base64.getDecoder().decode(imgStr);
// 生成文件
// String sangImageStr = "D:/My Documents/ip.jpg" ; // 要生成文件的路径.
OutputStream out = new FileOutputStream(savedImagePath);

View File

@@ -20,13 +20,34 @@ import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.multipart.MultipartHttpServletRequest;
import javax.servlet.http.HttpServletRequest;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.Semaphore;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -63,6 +84,29 @@ public class AnjuanAndJuanneiController {
@Autowired
private OcrLogMapper ocrLogMapper;
/**
* OCR线程池限制并发与队列长度防止瞬时创建大量线程导致内存峰值
*/
private final ExecutorService ocrExecutor = new ThreadPoolExecutor(
1,
3,
60L,
TimeUnit.SECONDS,
new LinkedBlockingQueue<>(30),
runnable -> {
Thread t = new Thread(runnable);
t.setName("OCR-Processing");
t.setDaemon(true);
return t;
},
new ThreadPoolExecutor.AbortPolicy()
);
/**
* 上传串行限流全局仅允许并行处理3个文件超出排队等待
*/
private static final Semaphore uploadSemaphore = new Semaphore(3);
@RequestMapping(value = "/getWord", method = RequestMethod.POST)
@ApiOperation(value = "获取图片文字")
public AjaxJson getWord(String tableName,Integer id){
@@ -193,92 +237,137 @@ public class AnjuanAndJuanneiController {
@RequestMapping(value = "/uploadSimpleFilesAnjuan", method = RequestMethod.POST)
@ApiOperation(value = "传统案卷整理原文-单个或者多个上传")
//file要与表单上传的名字相同
public AjaxJson uploadSimpleFilesAnjuan(MultipartFile[] file, String fondscode, Integer recId, String tableName, HttpServletRequest request) {
// 使用NIO方式处理多文件上传避免OOM
public AjaxJson uploadSimpleFilesAnjuan(HttpServletRequest request, String fondscode, Integer recId, String tableName) {
Integer successNum = 0;
Integer falseNum = 0;
for (int i = 0; i < file.length; i++) {
MultipartFile file0 = file[i];
//创建文件在服务器端存放路径
// String dir = request.getRealPath("/") + "uploadFile"+File.separator+ tableName+"_temp_file" + File.separator + fondscode + File.separator + recId;
String dir = uploadPath + "uploadFile/"+ tableName+"_temp_file/" + fondscode + "/" + recId;
File fileDir = new File(dir);
if (!fileDir.exists()) {
fileDir.mkdirs();
}
AjaxJson json2 = uploadFilesByPathAnjuan(file0, fondscode,dir, recId, tableName,request);
if ("101".equals(json2.getCode())) {
falseNum++;
}
if ("100".equals(json2.getCode())) {
successNum++;
}
String originalFilename = file0.getOriginalFilename();
int index = originalFilename.lastIndexOf(".") + 1;
String fileType = originalFilename.substring(index);
//启动一个线程根据ocr获取图片文字"file_content,"+
if(!fileType.equalsIgnoreCase("mp3") && !fileType.equalsIgnoreCase("mp4")) {
new Thread() {
@Override
public void run() {
OcrLog ocrLog = new OcrLog();
if(youhongIntegrate){
try {
File file1 = (File) json2.getBody().get("file");
String fileNameServer = json2.getBody().get("file_name_server").toString();
String ocrImageWord = OCRUtil.yhOcr(file1,youhongBaseUrl);
Map<String, Object> map8=new HashMap<>();
String fieldValue = "file_content" + "=" + "'"+ocrImageWord+"'";
String conditionSql = "file_name_server" + "=" + "'"+fileNameServer+"'";
map8.put("tableName",tableName+"_temp_file");
map8.put("fieldValue",fieldValue);
map8.put("conditionSql",conditionSql);
danganguanliService.updateObject(map8);
ocrLog.setFileName(file1.getName());
ocrLog.setTableName(tableName+"_temp_file");
ocrLog.setStatusType("0");
} catch (Exception e) {
ocrLog.setFailureReason(e.getMessage());
ocrLog.setStatusType("1");
ocrLogMapper.insert(ocrLog);
throw new RuntimeException(e);
}
}else{
OCRUtil.setTessPath(tessPath);
File file1 = (File) json2.getBody().get("file");
String fileNameServer = json2.getBody().get("file_name_server").toString();
try {
String ocrImageWord = OCRUtil.getOcrImageWord(file1);
ocrImageWord=ocrImageWord.replaceAll("'","");
Map<String, Object> map8 = new HashMap<>();
String fieldValue = "file_content" + "=" + "'" + ocrImageWord + "'";
String conditionSql = "file_name_server" + "=" + "'" + fileNameServer + "'";
map8.put("tableName", tableName + "_temp_file");
map8.put("fieldValue", fieldValue);
map8.put("conditionSql", conditionSql);
danganguanliService.updateObject(map8);
ocrLog.setFileName(file1.getName());
ocrLog.setTableName(tableName+"_temp_file");
ocrLog.setStatusType("0");
} catch (Exception e) {
ocrLog.setFailureReason(e.getMessage());
ocrLog.setStatusType("1");
ocrLogMapper.insert(ocrLog);
throw new RuntimeException(e);
}
}
ocrLogMapper.insert(ocrLog);
}
}.start();
// 验证参数
if (recId == null || recId <= 0) {
return AjaxJson.returnExceptionInfo("记录ID无效");
}
if (StringUtil.isEmpty(fondscode)) {
return AjaxJson.returnExceptionInfo("全宗号不能为空");
}
if (StringUtil.isEmpty(tableName)) {
return AjaxJson.returnExceptionInfo("表名不能为空");
}
// 创建文件在服务器端存放路径
String dir = uploadPath + "uploadFile" + File.separator + tableName + "_temp_file" + File.separator + fondscode + File.separator + recId;
File fileDir = new File(dir);
if (!fileDir.exists()) {
boolean created = fileDir.mkdirs();
if (!created) {
logger.error("创建目录失败: {}", dir);
return AjaxJson.returnExceptionInfo("创建目录失败");
}
}
Map<String, Object> map7=new HashMap<>();
map7.put("tableName",tableName+"_temp");
map7.put("tableName2",tableName+"_temp_file");
map7.put("id",recId);
// 验证目录是否可写
if (!fileDir.canWrite()) {
logger.error("目录无写权限: {}", dir);
return AjaxJson.returnExceptionInfo("目录无写权限");
}
// 使用NIO方式迭代处理文件避免一次性加载所有文件到内存
if (request instanceof MultipartHttpServletRequest) {
MultipartHttpServletRequest multipartRequest = (MultipartHttpServletRequest) request;
// 获取所有文件,包括同名字段的多个文件
Map<String, MultipartFile> fileMap = multipartRequest.getFileMap();
logger.info("getFileMap() 接收到文件数量: {}", fileMap.size());
// 尝试获取名为 "file" 的多文件数组
List<MultipartFile> allFiles = new ArrayList<>();
try {
List<MultipartFile> files = multipartRequest.getFiles("file");
if (files != null && files.size() > 0) {
logger.info("getFiles('file') 接收到文件数量: {}", files.size());
allFiles.addAll(files);
} else {
// 回退到 getFileMap
allFiles.addAll(fileMap.values());
logger.info("回退到 getFileMap 方法,文件数量: {}", allFiles.size());
}
} catch (Exception e) {
logger.warn("获取文件数组失败,使用 getFileMap: {}", e.getMessage());
allFiles.addAll(fileMap.values());
}
logger.info("最终处理文件数量: {}", allFiles.size());
int fileIndex = 0;
for (MultipartFile file0 : allFiles) {
boolean acquired = false;
try {
uploadSemaphore.acquire(); // 最多并行处理3个文件
acquired = true;
fileIndex++;
logger.info("处理第{}个文件: 文件名={}, 大小={} bytes", fileIndex, file0.getOriginalFilename(), file0.getSize());
if (file0 == null || file0.isEmpty()) {
logger.warn("第{}个文件为空,跳过", fileIndex);
falseNum++;
continue;
}
// 使用零拷贝方式处理单个文件,最大程度优化内存使用
AjaxJson json2 = uploadFilesByPathAnjuanZeroCopy(file0, fondscode, dir, recId, tableName, request);
if ("101".equals(json2.getCode())) {
falseNum++;
}
if ("100".equals(json2.getCode())) {
successNum++;
}
// 异步处理OCR避免阻塞
String originalFilename = file0.getOriginalFilename();
if (originalFilename != null) {
int index = originalFilename.lastIndexOf(".") + 1;
if (index > 0 && index < originalFilename.length()) {
String fileType = originalFilename.substring(index);
if (!fileType.equalsIgnoreCase("mp3") && !fileType.equalsIgnoreCase("mp4")) {
OCRProcessingTask ocrTask = new OCRProcessingTask(json2, tableName, youhongIntegrate, youhongBaseUrl, tessPath, ocrLogMapper, danganguanliService);
try {
ocrExecutor.execute(ocrTask);
} catch (RejectedExecutionException ex) {
logger.warn("OCR队列已满跳过文件: {}", originalFilename);
}
}
}
}
// 显式释放资源
try {
if (file0.getInputStream() != null) {
file0.getInputStream().close();
}
} catch (IOException e) {
logger.warn("关闭文件流时出错: {}", e.getMessage());
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
logger.warn("获取上传许可被中断,终止处理");
return AjaxJson.returnExceptionInfo("上传处理中断");
} finally {
if (acquired) {
uploadSemaphore.release();
}
}
}
} else {
return AjaxJson.returnExceptionInfo("请求类型不支持");
}
Map<String, Object> map7 = new HashMap<>();
map7.put("tableName", tableName + "_temp");
map7.put("tableName2", tableName + "_temp_file");
map7.put("id", recId);
danganguanliService.wsajmlTempCount(map7);
AjaxJson json = AjaxJson.returnInfo("成功上传数successNum失败上传数falseNum");
AjaxJson json = AjaxJson.returnInfo("成功上传数:"+successNum+",失败上传数:"+falseNum);
json.put("successNum", successNum);
json.put("falseNum", falseNum);
return json;
@@ -287,9 +376,19 @@ public class AnjuanAndJuanneiController {
private AjaxJson uploadFilesByPathAnjuan(MultipartFile file,String fondscode, String dir, Integer recId,String tableName, HttpServletRequest request) {
AjaxJson json = null;
File files = null;
try {
String originalFilename = file.getOriginalFilename();
if (StringUtil.isEmpty(originalFilename)) {
return AjaxJson.returnExceptionInfo("文件名为空");
}
int index = originalFilename.lastIndexOf(".") + 1;
if (index <= 0 || index >= originalFilename.length()) {
return AjaxJson.returnExceptionInfo("文件格式不正确");
}
String fileType = originalFilename.substring(index);
String file_name_server=StringUtil.generaterUUID()+"."+fileType;
@@ -299,11 +398,40 @@ public class AnjuanAndJuanneiController {
map5.put("tableName",tableName + "_temp_file");
map5.put("conditionSql","rec_id= '"+recId+"' and file_status=1 ");
int pageNo =danganguanliService.selectObjectCount(map5)+1;
File files = new File(dir + "/" + file_name_server);
// 使用File.separator确保跨平台兼容性
files = new File(dir + File.separator + file_name_server);
// 检查目标文件是否已存在
if (files.exists()) {
logger.warn("目标文件已存在,将覆盖: {}", files.getAbsolutePath());
boolean deleted = files.delete();
if (!deleted) {
logger.error("无法删除已存在的文件: {}", files.getAbsolutePath());
return AjaxJson.returnExceptionInfo("无法删除已存在的文件");
}
}
// 验证目录是否存在且可写
File parentDir = files.getParentFile();
if (parentDir == null || !parentDir.exists()) {
return AjaxJson.returnExceptionInfo("父目录不存在");
}
if (!parentDir.canWrite()) {
logger.error("目录无写权限: {}", parentDir.getAbsolutePath());
return AjaxJson.returnExceptionInfo("目录无写权限");
}
// 文件传输
file.transferTo(files);
// 验证文件是否成功写入
if (!files.exists() || files.length() == 0) {
logger.error("文件传输失败或文件为空: {}", files.getAbsolutePath());
return AjaxJson.returnExceptionInfo("文件传输失败");
}
String file_path="uploadFile/"+tableName + "_temp_file/"+fondscode+"/"+recId;
String file_path="uploadFile" + File.separator + tableName + "_temp_file" + File.separator + fondscode + File.separator + recId;
// String file_path=getFileName(dir);
String fieldName=
"file_name," +
@@ -331,42 +459,439 @@ public class AnjuanAndJuanneiController {
map.put("valueName",valueName);
danganguanliService.saveObject(map);
//再把文件复制一份,作用是 用于合并文件、合并下载
// String newName=file_name_server.replace(".jpg","_original.jpg");
// FileTool.copyFile(dir+File.separator+file_name_server,dir+File.separator+newName);
if(!fileType.equalsIgnoreCase("mp3") && !fileType.equalsIgnoreCase("mp4")){
//生成一份pdf文件用于归档章的操作
//生成一份pdf文件用于归档章的操作 - 使用File.separator
String newName_pdf=file_name_server.replace("."+fileType,".pdf");
PdfFileHelper.image2Pdf(dir+File.separator+file_name_server,dir+File.separator+newName_pdf);
String sourcePath = dir + File.separator + file_name_server;
String targetPath = dir + File.separator + newName_pdf;
String newName_pdf_original=newName_pdf.replace(".pdf","_original.pdf");
FileTool.copyFile(dir+File.separator+newName_pdf,dir+File.separator+newName_pdf_original);
boolean pdfCreated = PdfFileHelper.image2Pdf(sourcePath, targetPath);
if (!pdfCreated) {
logger.warn("PDF文件生成失败: {} -> {}", sourcePath, targetPath);
} else {
// 只有PDF生成成功才复制原始文件
String newName_pdf_original=newName_pdf.replace(".pdf","_original.pdf");
String originalPath = dir + File.separator + newName_pdf_original;
FileTool.copyFile(targetPath, originalPath);
}
}
//mxf格式的文件需要转换一份mp4给前端展示
if (fileType.equalsIgnoreCase("mxf")) {
String replaceMp4 = "";
String replaceMp4;
if ("MXF".equals(fileType)) {
replaceMp4 = files.getPath().replace(".MXF", ".mp4");
}else {
} else {
replaceMp4 = files.getPath().replace(".mxf", ".mp4");
}
VideoConvertUtil.convert(files.getPath(), replaceMp4);
}
json = AjaxJson.returnInfo("上传文件成功");
json.put("file",files);
json.put("file_name_server",file_name_server);
} catch (Exception e) {
json = AjaxJson.returnExceptionInfo("上传文件失败" + e);
logger.info(file.getOriginalFilename() + "上传文件失败" + e);
logger.error("上传文件失败: {}", file.getOriginalFilename(), e);
json = AjaxJson.returnExceptionInfo("上传文件失败: " + e.getMessage());
// 清理失败的文件
if (files != null && files.exists()) {
try {
boolean deleted = files.delete();
if (deleted) {
logger.info("清理失败文件成功: {}", files.getAbsolutePath());
}
} catch (Exception deleteEx) {
logger.warn("清理失败文件时出错: {}", files.getAbsolutePath(), deleteEx);
}
}
}
return json;
}
/**
 * NIO-based single-file upload: streams the multipart payload to disk through a
 * FileChannel (avoiding a full in-memory copy and the OOM risk of byte[] buffering),
 * then records the file's metadata in the <tableName>_temp_file table.
 *
 * @param file      uploaded multipart file
 * @param fondscode archive fonds code; only used to build the logical file_path value
 * @param dir       absolute server-side directory the file is written into (must exist)
 * @param recId     owning record id in <tableName>_temp
 * @param tableName base business table name; "_temp_file" is appended for the file table
 * @param request   current request (unused here; kept for signature compatibility)
 * @return success AjaxJson carrying "file" and "file_name_server", or an
 *         exception-info AjaxJson describing the failure
 */
private AjaxJson uploadFilesByPathAnjuanNIO(MultipartFile file, String fondscode, String dir, Integer recId, String tableName, HttpServletRequest request) {
    AjaxJson json = null;
    Path targetPath = null;
    InputStream inputStream = null;
    FileChannel fileChannel = null;
    try {
        String originalFilename = file.getOriginalFilename();
        if (StringUtil.isEmpty(originalFilename)) {
            return AjaxJson.returnExceptionInfo("文件名为空");
        }
        // Extension starts right after the last '.'; reject names with no usable extension.
        int index = originalFilename.lastIndexOf(".") + 1;
        if (index <= 0 || index >= originalFilename.length()) {
            return AjaxJson.returnExceptionInfo("文件格式不正确");
        }
        String fileType = originalFilename.substring(index);
        // Server-side name is a UUID so concurrent uploads can never collide.
        String file_name_server = StringUtil.generaterUUID() + "." + fileType;
        long fileLen = file.getSize() / 1024; // stored in KB
        Map<String, Object> map5 = new HashMap<>();
        map5.put("tableName", tableName + "_temp_file");
        map5.put("conditionSql", "rec_id= '" + recId + "' and file_status=1 ");
        // page_no = 1 + number of active files already attached to this record.
        int pageNo = danganguanliService.selectObjectCount(map5) + 1;
        targetPath = Paths.get(dir, file_name_server);
        // UUID collisions are practically impossible, but stay defensive and overwrite.
        if (Files.exists(targetPath)) {
            logger.warn("目标文件已存在,将覆盖: {}", targetPath.toAbsolutePath());
            Files.deleteIfExists(targetPath);
        }
        Path parentDir = targetPath.getParent();
        if (parentDir == null || !Files.exists(parentDir)) {
            return AjaxJson.returnExceptionInfo("父目录不存在");
        }
        if (!Files.isWritable(parentDir)) {
            logger.error("目录无写权限: {}", parentDir.toAbsolutePath());
            return AjaxJson.returnExceptionInfo("目录无写权限");
        }
        inputStream = file.getInputStream();
        fileChannel = FileChannel.open(targetPath,
                java.nio.file.StandardOpenOption.CREATE,
                java.nio.file.StandardOpenOption.WRITE,
                java.nio.file.StandardOpenOption.TRUNCATE_EXISTING);
        ReadableByteChannel readableByteChannel = Channels.newChannel(inputStream);
        // BUGFIX: a single transferFrom(..., Long.MAX_VALUE) call may stop early when
        // the source is a streaming (non-file) channel, silently truncating large
        // uploads. Loop in bounded chunks until the stream is drained, mirroring
        // uploadFilesByPathAnjuanZeroCopy.
        final long CHUNK_SIZE = 8L * 1024 * 1024; // 8MB per transfer request
        long transferred = 0;
        while (transferred < file.getSize()) {
            long chunk = Math.min(CHUNK_SIZE, file.getSize() - transferred);
            long moved = fileChannel.transferFrom(readableByteChannel, transferred, chunk);
            if (moved == 0) {
                break; // source reached EOF
            }
            transferred += moved;
        }
        // Flush to disk so the metadata row inserted below never points at a
        // half-written file (same durability guarantee as the zero-copy variant).
        fileChannel.force(true);
        if (!Files.exists(targetPath) || Files.size(targetPath) == 0) {
            logger.error("文件传输失败或文件为空: {}", targetPath.toAbsolutePath());
            return AjaxJson.returnExceptionInfo("文件传输失败");
        }
        logger.info("文件传输完成: {} bytes, 目标: {}", transferred, targetPath.toAbsolutePath());
        String file_path = "uploadFile" + File.separator + tableName + "_temp_file" + File.separator + fondscode + File.separator + recId;
        String fieldName =
                "file_name," +
                "rec_id," +
                "file_type," +
                "file_len," +
                "file_path," +
                "page_no," +
                "file_status," +
                "is_divided," +
                "file_des," +
                "file_name_server";
        // NOTE(review): this INSERT is built by string concatenation; originalFilename
        // comes from the client, so a quote in a file name can break or inject into
        // the SQL. Parameterized statements in danganguanliService would be the real fix.
        String valueName = "'" + originalFilename + "'" + ","
                + "'" + recId + "'" + ","
                + "'" + fileType + "'" + ","
                + "'" + fileLen + "'" + ","
                + "'" + file_path + "'" + ","
                + "'" + pageNo + "'"
                + ",1,-1,"
                + "'" + dir + "'" + ","
                + "'" + file_name_server + "'";
        Map<String, Object> map = new HashMap<String, Object>();
        map.put("tableName", tableName + "_temp_file");
        map.put("fieldName", fieldName);
        map.put("valueName", valueName);
        danganguanliService.saveObject(map);
        if (!fileType.equalsIgnoreCase("mp3") && !fileType.equalsIgnoreCase("mp4")) {
            // Non-audio/video files get a PDF rendition used by the archival-stamp flow.
            String newName_pdf = file_name_server.replace("." + fileType, ".pdf");
            Path sourcePath = targetPath;
            Path pdfPath = Paths.get(dir, newName_pdf);
            boolean pdfCreated = PdfFileHelper.image2Pdf(sourcePath.toString(), pdfPath.toString());
            if (!pdfCreated) {
                logger.warn("PDF文件生成失败: {} -> {}", sourcePath, pdfPath);
            } else {
                // Only keep an "_original" pristine copy once the PDF was produced.
                String newName_pdf_original = newName_pdf.replace(".pdf", "_original.pdf");
                Path originalPath = Paths.get(dir, newName_pdf_original);
                try {
                    Files.copy(pdfPath, originalPath, StandardCopyOption.REPLACE_EXISTING);
                } catch (IOException e) {
                    logger.warn("复制PDF原始文件失败: {}", e.getMessage());
                }
            }
        }
        if (fileType.equalsIgnoreCase("mxf")) {
            // MXF masters are transcoded to MP4 for in-browser preview.
            // BUGFIX: derive the replacement from the actual extension so mixed-case
            // names like ".Mxf" are handled too (old code only covered "MXF"/"mxf").
            String replaceMp4 = targetPath.toString().replace("." + fileType, ".mp4");
            VideoConvertUtil.convert(targetPath.toString(), replaceMp4);
        }
        json = AjaxJson.returnInfo("上传文件成功");
        json.put("file", targetPath.toFile());
        json.put("file_name_server", file_name_server);
    } catch (Exception e) {
        logger.error("上传文件失败: {}", file.getOriginalFilename(), e);
        json = AjaxJson.returnExceptionInfo("上传文件失败: " + e.getMessage());
        // Best-effort cleanup of the partial file so failed uploads leave no debris.
        if (targetPath != null && Files.exists(targetPath)) {
            try {
                Files.deleteIfExists(targetPath);
                logger.info("清理失败文件成功: {}", targetPath.toAbsolutePath());
            } catch (Exception deleteEx) {
                logger.warn("清理失败文件时出错: {}", targetPath.toAbsolutePath(), deleteEx);
            }
        }
    } finally {
        // Channels/streams are closed here (not try-with-resources) because the
        // failure-cleanup path above still needs targetPath after an exception.
        try {
            if (fileChannel != null) {
                fileChannel.close();
            }
        } catch (IOException e) {
            logger.warn("关闭FileChannel时出错: {}", e.getMessage());
        }
        try {
            if (inputStream != null) {
                inputStream.close();
            }
        } catch (IOException e) {
            logger.warn("关闭InputStream时出错: {}", e.getMessage());
        }
    }
    return json;
}
/**
 * Zero-copy single-file upload: transfers the multipart payload straight into the
 * target file through FileChannel.transferFrom (data moves kernel-side rather than
 * through user-space buffers, minimizing heap use and OOM risk), then records the
 * file's metadata in the <tableName>_temp_file table.
 *
 * On success the returned AjaxJson carries "file" (java.io.File of the stored copy)
 * and "file_name_server" (UUID-based server name); on any failure an exception-info
 * AjaxJson is returned and the partially written file is deleted best-effort.
 *
 * @param file      uploaded multipart file
 * @param fondscode archive fonds code; only used to build the logical file_path value
 * @param dir       absolute server-side directory the file is written into
 * @param recId     owning record id in <tableName>_temp
 * @param tableName base business table name; "_temp_file" is appended for the file table
 * @param request   current request (unused in the body; kept for signature parity
 *                  with the sibling upload helpers)
 */
private AjaxJson uploadFilesByPathAnjuanZeroCopy(MultipartFile file, String fondscode, String dir, Integer recId, String tableName, HttpServletRequest request) {
    AjaxJson json = null;
    Path targetPath = null;
    ReadableByteChannel readableByteChannel = null;
    FileChannel fileChannel = null;
    try {
        String originalFilename = file.getOriginalFilename();
        if (StringUtil.isEmpty(originalFilename)) {
            return AjaxJson.returnExceptionInfo("文件名为空");
        }
        // Extension starts right after the last '.'; reject names without one.
        int index = originalFilename.lastIndexOf(".") + 1;
        if (index <= 0 || index >= originalFilename.length()) {
            return AjaxJson.returnExceptionInfo("文件格式不正确");
        }
        String fileType = originalFilename.substring(index);
        // Server-side name is a UUID so concurrent uploads never collide.
        String file_name_server = StringUtil.generaterUUID() + "." + fileType;
        long fileLen = file.getSize() / 1024; // stored in KB
        Map<String, Object> map5 = new HashMap<>();
        map5.put("tableName", tableName + "_temp_file");
        // NOTE(review): SQL fragments here and below are built by concatenation;
        // recId is an Integer so risk is limited, but originalFilename is
        // client-controlled — parameterized queries would be safer.
        map5.put("conditionSql", "rec_id= '" + recId + "' and file_status=1 ");
        // page_no = 1 + count of active files already attached to this record.
        int pageNo = danganguanliService.selectObjectCount(map5) + 1;
        // NIO Path/Files API for all filesystem interaction.
        targetPath = Paths.get(dir, file_name_server);
        // Overwrite defensively if the (practically impossible) UUID collision occurs.
        if (Files.exists(targetPath)) {
            logger.warn("目标文件已存在,将覆盖: {}", targetPath.toAbsolutePath());
            Files.deleteIfExists(targetPath);
        }
        // Verify the parent directory exists and is writable before opening channels.
        Path parentDir = targetPath.getParent();
        if (parentDir == null || !Files.exists(parentDir)) {
            return AjaxJson.returnExceptionInfo("父目录不存在");
        }
        if (!Files.isWritable(parentDir)) {
            logger.error("目录无写权限: {}", parentDir.toAbsolutePath());
            return AjaxJson.returnExceptionInfo("目录无写权限");
        }
        // Zero-copy transfer: FileChannel.transferFrom moves data without an extra
        // user-space copy. The InputStream is scoped to this try-with-resources;
        // the two channels are closed in the outer finally.
        try (InputStream inputStream = file.getInputStream()) {
            readableByteChannel = Channels.newChannel(inputStream);
            fileChannel = FileChannel.open(targetPath,
                    StandardOpenOption.CREATE,
                    StandardOpenOption.WRITE,
                    StandardOpenOption.TRUNCATE_EXISTING);
            long transferred = 0;
            long position = 0;
            long count = Long.MAX_VALUE; // NOTE(review): unused; kept as-is in this doc pass
            // Transfer in bounded chunks so one call never requests the whole
            // (potentially huge) payload at once; transferFrom may move fewer
            // bytes than asked, hence the loop.
            final long CHUNK_SIZE = 8 * 1024 * 1024; // 8MB chunks
            while (position < file.getSize()) {
                long chunkSize = Math.min(CHUNK_SIZE, file.getSize() - position);
                long transferredChunk = fileChannel.transferFrom(readableByteChannel, position, chunkSize);
                if (transferredChunk == 0) {
                    break; // source channel exhausted
                }
                position += transferredChunk;
                transferred += transferredChunk;
            }
            // fsync so the metadata row inserted below never points at a
            // half-persisted file.
            fileChannel.force(true);
            // Sanity check: the file must exist and be non-empty after transfer.
            if (!Files.exists(targetPath) || Files.size(targetPath) == 0) {
                logger.error("文件传输失败或文件为空: {}", targetPath.toAbsolutePath());
                return AjaxJson.returnExceptionInfo("文件传输失败");
            }
            logger.info("零拷贝文件传输完成: {} bytes, 目标: {}", transferred, targetPath.toAbsolutePath());
        }
        String file_path = "uploadFile" + File.separator + tableName + "_temp_file" + File.separator + fondscode + File.separator + recId;
        String fieldName =
                "file_name," +
                "rec_id," +
                "file_type," +
                "file_len," +
                "file_path," +
                "page_no," +
                "file_status," +
                "is_divided," +
                "file_des," +
                "file_name_server";
        String valueName = "'" + originalFilename + "'" + ","
                + "'" + recId + "'" + ","
                + "'" + fileType + "'" + ","
                + "'" + fileLen + "'" + ","
                + "'" + file_path + "'" + ","
                + "'" + pageNo + "'"
                + ",1,-1,"
                + "'" + dir + "'" + ","
                + "'" + file_name_server + "'";
        Map<String, Object> map = new HashMap<String, Object>();
        map.put("tableName", tableName + "_temp_file");
        map.put("fieldName", fieldName);
        map.put("valueName", valueName);
        danganguanliService.saveObject(map);
        if (!fileType.equalsIgnoreCase("mp3") && !fileType.equalsIgnoreCase("mp4")) {
            // Non-audio/video files get a PDF rendition for the archival-stamp flow.
            String newName_pdf = file_name_server.replace("." + fileType, ".pdf");
            Path sourcePath = targetPath;
            Path pdfPath = Paths.get(dir, newName_pdf);
            boolean pdfCreated = PdfFileHelper.image2Pdf(sourcePath.toString(), pdfPath.toString());
            if (!pdfCreated) {
                logger.warn("PDF文件生成失败: {} -> {}", sourcePath, pdfPath);
            } else {
                // Only keep an "_original" pristine copy once the PDF was produced.
                String newName_pdf_original = newName_pdf.replace(".pdf", "_original.pdf");
                Path originalPath = Paths.get(dir, newName_pdf_original);
                try {
                    // Prefer the channel-based zero-copy duplicate...
                    copyFileZeroCopy(pdfPath, originalPath);
                } catch (IOException e) {
                    logger.warn("零拷贝复制PDF原始文件失败: {}", e.getMessage());
                    // ...falling back to Files.copy; this copy is best-effort and
                    // never fails the upload.
                    try {
                        Files.copy(pdfPath, originalPath, StandardCopyOption.REPLACE_EXISTING);
                    } catch (IOException fallbackEx) {
                        logger.warn("复制PDF原始文件失败: {}", fallbackEx.getMessage());
                    }
                }
            }
        }
        // MXF masters are transcoded to MP4 for in-browser preview.
        if (fileType.equalsIgnoreCase("mxf")) {
            String replaceMp4;
            if ("MXF".equals(fileType)) {
                replaceMp4 = targetPath.toString().replace(".MXF", ".mp4");
            } else {
                replaceMp4 = targetPath.toString().replace(".mxf", ".mp4");
            }
            VideoConvertUtil.convert(targetPath.toString(), replaceMp4);
        }
        json = AjaxJson.returnInfo("上传文件成功");
        json.put("file", targetPath.toFile());
        json.put("file_name_server", file_name_server);
    } catch (Exception e) {
        logger.error("零拷贝上传文件失败: {}", file.getOriginalFilename(), e);
        json = AjaxJson.returnExceptionInfo("上传文件失败: " + e.getMessage());
        // Best-effort cleanup of the partial file so failed uploads leave no debris.
        if (targetPath != null && Files.exists(targetPath)) {
            try {
                Files.deleteIfExists(targetPath);
                logger.info("清理失败文件成功: {}", targetPath.toAbsolutePath());
            } catch (Exception deleteEx) {
                logger.warn("清理失败文件时出错: {}", targetPath.toAbsolutePath(), deleteEx);
            }
        }
    } finally {
        // Channels are closed here (not try-with-resources) because the cleanup
        // path above still needs targetPath after a failure.
        try {
            if (fileChannel != null) {
                fileChannel.close();
            }
        } catch (IOException e) {
            logger.warn("关闭FileChannel时出错: {}", e.getMessage());
        }
        try {
            if (readableByteChannel != null) {
                readableByteChannel.close();
            }
        } catch (IOException e) {
            logger.warn("关闭ReadableByteChannel时出错: {}", e.getMessage());
        }
    }
    return json;
}
/**
 * Duplicates a file using channel-to-channel transfer, so the data moves between
 * the two files without being staged in user-space buffers. The target is created
 * (or truncated) and fsync'd before returning.
 *
 * @param source existing file to read from
 * @param target destination file; created or overwritten
 * @throws IOException if either channel cannot be opened or a transfer fails
 */
private void copyFileZeroCopy(Path source, Path target) throws IOException {
    final long BLOCK = 8 * 1024 * 1024; // move at most 8MB per transfer request
    try (FileChannel in = FileChannel.open(source, StandardOpenOption.READ);
         FileChannel out = FileChannel.open(target,
                 StandardOpenOption.CREATE,
                 StandardOpenOption.WRITE,
                 StandardOpenOption.TRUNCATE_EXISTING)) {
        long total = in.size();
        long written = 0;
        // transferFrom may move fewer bytes than requested, so keep asking until
        // everything has been written (or the source unexpectedly dries up).
        while (written < total) {
            long moved = out.transferFrom(in, written, Math.min(BLOCK, total - written));
            if (moved == 0) {
                break; // defensive: avoid spinning if no progress is possible
            }
            written += moved;
        }
        // Flush file contents and metadata to disk before returning.
        out.force(true);
    }
}
//==================================================================================

View File

@@ -0,0 +1,114 @@
package com.point.strategy.originBatchUpload;
import com.point.strategy.bean.OcrLog;
import com.point.strategy.common.AjaxJson;
import com.point.strategy.ocr.OCRUtil;
import com.point.strategy.dao.OcrLogMapper;
import com.point.strategy.docTraditionArrange.docVolume.service.DanganguanliService;
import lombok.extern.slf4j.Slf4j;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
/**
 * OCR processing task submitted to the controller's ocrExecutor; replaces the
 * ad-hoc anonymous Thread previously spawned per upload. It extracts text from
 * the uploaded file (remote Youhong OCR or local Tesseract, per configuration),
 * writes the text into <tableName>_temp_file.file_content, and always records
 * exactly one OcrLog row describing the outcome.
 */
public class OCRProcessingTask implements Runnable {
    // BUGFIX: the class previously carried BOTH @Slf4j and this hand-written field.
    // Lombok generates a field also named 'log', so the two collide; keep the
    // explicit logger and drop the annotation.
    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(OCRProcessingTask.class);

    private final AjaxJson json2;                 // upload result carrying "file" and "file_name_server"
    private final String tableName;               // base table; "_temp_file" is appended
    private final boolean youhongIntegrate;       // true -> remote Youhong OCR, false -> local Tesseract
    private final String youhongBaseUrl;          // Youhong service base URL
    private final String tessPath;                // Tesseract installation path
    private final OcrLogMapper ocrLogMapper;
    private final DanganguanliService danganguanliService;

    public OCRProcessingTask(AjaxJson json2, String tableName, boolean youhongIntegrate,
                             String youhongBaseUrl, String tessPath, OcrLogMapper ocrLogMapper,
                             DanganguanliService danganguanliService) {
        this.json2 = json2;
        this.tableName = tableName;
        this.youhongIntegrate = youhongIntegrate;
        this.youhongBaseUrl = youhongBaseUrl;
        this.tessPath = tessPath;
        this.ocrLogMapper = ocrLogMapper;
        this.danganguanliService = danganguanliService;
    }

    /**
     * Runs the OCR pipeline. Never propagates exceptions: failures are captured in
     * the OcrLog row (statusType "1") so the executor thread always survives.
     */
    @Override
    public void run() {
        OcrLog ocrLog = new OcrLog();
        ocrLog.setTableName(tableName + "_temp_file");
        File file1 = null;
        String fileNameServer = null;
        try {
            file1 = (File) json2.getBody().get("file");
            fileNameServer = json2.getBody().get("file_name_server").toString();
            if (file1 == null || !file1.exists()) {
                throw new RuntimeException("文件不存在: " + fileNameServer);
            }
            String ocrImageWord = "";
            if (youhongIntegrate) {
                // Remote Youhong OCR service
                try {
                    ocrImageWord = OCRUtil.yhOcr(file1, youhongBaseUrl);
                    log.info("友虹OCR处理成功: {}", file1.getName());
                } catch (Exception e) {
                    log.error("友虹OCR处理失败: {}", file1.getName(), e);
                    throw new RuntimeException("友虹OCR处理失败: " + e.getMessage());
                }
            } else {
                // Local Tesseract OCR
                try {
                    OCRUtil.setTessPath(tessPath);
                    ocrImageWord = OCRUtil.getOcrImageWord(file1);
                    log.info("Tesseract OCR处理成功: {}", file1.getName());
                } catch (Exception e) {
                    log.error("Tesseract OCR处理失败: {}", file1.getName(), e);
                    throw new RuntimeException("Tesseract OCR处理失败: " + e.getMessage());
                }
            }
            // BUGFIX: strip single quotes for BOTH engines. Previously only the
            // Tesseract branch did this, so a quote in Youhong output corrupted the
            // concatenated UPDATE statement below.
            ocrImageWord = ocrImageWord.replaceAll("'", "");
            // Persist the recognized text into the file row.
            if (!ocrImageWord.isEmpty()) {
                Map<String, Object> map8 = new HashMap<>();
                // NOTE(review): SQL fragments are built by concatenation; the quote
                // stripping above is the only guard. Parameterized updates in
                // danganguanliService would be the proper fix.
                String fieldValue = "file_content" + "=" + "'" + ocrImageWord + "'";
                String conditionSql = "file_name_server" + "=" + "'" + fileNameServer + "'";
                map8.put("tableName", tableName + "_temp_file");
                map8.put("fieldValue", fieldValue);
                map8.put("conditionSql", conditionSql);
                danganguanliService.updateObject(map8);
            }
            // Record success
            ocrLog.setFileName(file1.getName());
            ocrLog.setStatusType("0"); // success
            ocrLog.setFailureReason(null);
        } catch (Exception e) {
            log.error("OCR处理异常", e);
            // Record failure; file1 may still be null if the payload was malformed.
            ocrLog.setFileName(file1 != null ? file1.getName() : "unknown");
            ocrLog.setStatusType("1"); // failure
            ocrLog.setFailureReason(e.getMessage());
        } finally {
            // The log row is written exactly once, on success or failure.
            try {
                ocrLogMapper.insert(ocrLog);
                log.info("OCR日志记录完成: {} - {}", ocrLog.getFileName(), ocrLog.getStatusType());
            } catch (Exception e) {
                log.error("OCR日志记录失败", e);
            }
        }
    }
}

View File

@@ -588,9 +588,12 @@ public class ReceiveInformationService {
map.put("fondscode",fondscode);
map.put("detection",detection);
String fileName = fourCheckService.check(map);
if (fileName.equals("请先设置四性检测条目")){
// 避免对 null 调用 equals先判空
if ("请先设置四性检测条目".equals(fileName)){
json = AjaxJson.returnExceptionInfo(fileName);
}else {
} else if (fileName == null) {
json = AjaxJson.returnExceptionInfo("四性检测报表生成失败");
} else {
// //修改检测结果
// ReceiveInformation information1 = new ReceiveInformation();
// information1.setDetectionresult(2);

File diff suppressed because it is too large Load Diff

View File

@@ -1,10 +1,16 @@
package com.point.strategy.table;
import com.point.strategy.bean.TentityStructDescription;
import com.point.strategy.common.SystemProperty;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Service;
import java.sql.*;
import javax.annotation.PostConstruct;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
@@ -14,30 +20,67 @@ import java.util.List;
@Service
public class OperatorTable {
private static Environment env;
private static String url;
private static Connection conn = null;
private static Statement st = null;
private static String driver_dm = SystemProperty.getKeyValue("spring.datasource.driverClassName", "application.properties");
private static String driver_mysql = SystemProperty.getKeyValue("spring.datasource.driverClassName", "application.properties");
private static String driver_kingbase = SystemProperty.getKeyValue("spring.datasource.driverClassName", "application.properties");
@Autowired
public void setEnv(Environment environment) {
OperatorTable.env = environment;
}
private static String url = SystemProperty.getKeyValue("spring.datasource.url", "application.properties");
@PostConstruct
public void initEnv() {
// 触发一次读取,确保在 Spring 环境内完成静态字段赋值
getUrl();
}
private static String user = SystemProperty.getKeyValue("spring.datasource.username", "application.properties");
private static String getDriver() {
validateEnv();
return env.getProperty("spring.datasource.driverClassName");
}
private static String password = SystemProperty.getKeyValue("spring.datasource.password", "application.properties");
private static String getUrl() {
validateEnv();
url = env.getProperty("spring.datasource.url");
if (url == null || url.trim().isEmpty()) {
throw new IllegalStateException("未找到 spring.datasource.url请确认激活的 yml 中已配置数据源");
}
return url;
}
private static String getUser() {
validateEnv();
return env.getProperty("spring.datasource.username");
}
private static String getPassword() {
validateEnv();
return env.getProperty("spring.datasource.password");
}
private static void validateEnv() {
if (env == null) {
throw new IllegalStateException("未获取到 Spring 环境,无法读取激活配置的数据源信息");
}
}
//获取表字段
public static Boolean getTableField(String tableName,String field) throws Exception {
try {
Boolean flag = true;
String url = getUrl();
String driver = getDriver();
String user = getUser();
String password = getPassword();
if (url.contains("dm")){
Class.forName(driver_dm);
Class.forName(driver);
}else if(url.contains("mysql")){
Class.forName(driver_mysql);
Class.forName(driver);
} else if (url.contains("kingbase8")||url.contains("highgo")) {
Class.forName(driver_kingbase);
Class.forName(driver);
}
conn = DriverManager.getConnection(url, user, password);
//获得元数据
@@ -69,12 +112,16 @@ public class OperatorTable {
//添加字段
public static void addField(String sqlDDL) throws Exception {
try {
String url = getUrl();
String driver = getDriver();
String user = getUser();
String password = getPassword();
if (url.contains("dm")){
Class.forName(driver_dm);
Class.forName(driver);
}else if(url.contains("mysql")){
Class.forName(driver_mysql);
Class.forName(driver);
} else if (url.contains("kingbase8")||url.contains("highgo")) {
Class.forName(driver_kingbase);
Class.forName(driver);
}
conn = DriverManager.getConnection(url, user, password);
st = conn.createStatement();
@@ -93,12 +140,16 @@ public class OperatorTable {
public static void createTable(String sqlDDL) throws Exception {
try {
String url = getUrl();
String driver = getDriver();
String user = getUser();
String password = getPassword();
if (url.contains("dm")){
Class.forName(driver_dm);
Class.forName(driver);
}else if(url.contains("mysql")){
Class.forName(driver_mysql);
Class.forName(driver);
}else if (url.contains("kingbase8")||url.contains("highgo")) {
Class.forName(driver_kingbase);
Class.forName(driver);
}
conn = DriverManager.getConnection(url, user, password);
st = conn.createStatement();
@@ -118,12 +169,16 @@ public class OperatorTable {
public static void deleteTable(String sqlDDL) throws Exception {
try {
String url = getUrl();
String driver = getDriver();
String user = getUser();
String password = getPassword();
if (url.contains("dm")){
Class.forName(driver_dm);
Class.forName(driver);
}else if(url.contains("mysql")){
Class.forName(driver_mysql);
Class.forName(driver);
}else if (url.contains("kingbase8")||url.contains("highgo")) {
Class.forName(driver_kingbase);
Class.forName(driver);
}
conn = DriverManager.getConnection(url, user, password);
st = conn.createStatement();
@@ -2150,4 +2205,4 @@ public class OperatorTable {
System.out.println(start);
System.out.println(end);
}
}
}

View File

@@ -202,6 +202,7 @@ public class UreportService {
String tarFile = pathName + File.separator+ currentName + ".pdf";
PdfFileHelper.mergePdf(arr, tarFile);
}
}else{
//不是封面pdf

View File

@@ -124,9 +124,9 @@ public class SingleUserController {
} else {
//保存用户信息
session.setAttribute("user", userForBase);
if (cheackSession(userForBase.getUsername())) {
DbUtil.userLogout(userForBase.getUsername(), session);
}
// if (cheackSession(userForBase.getUsername())) {
// DbUtil.userLogout(userForBase.getUsername(), session);
// }
//查询用户对应的角色
List<Role> roles = new ArrayList<>();
List<Integer> integers = userService.selectRoleIdByUserId(userForBase.getId());
@@ -325,4 +325,4 @@ public class SingleUserController {
}
}
}

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -1,49 +1,144 @@
# 开发环境配置
---
server:
port: 9081
servlet:
context-path: /point-strategy
tomcat:
uriEncoding: UTF-8
max-threads: 1000
max-connections: 20000
spring:
#resources:
#static-locations: classpath:pdf/
application:
name: point-strategy
#elasticsearch:
#rest:
#uris: http://127.0.0.1:9200
#logging:
# level.org.springframework.boot.autoconfigure: info #spring的自动装配日志只打info否则debug输出的会打印很多自动装配的log信息到控制台
# config: classpath:logback-spring.xml
# 开发环境数据源配置
datasource:
driverClassName: com.mysql.jdbc.Driver
url: jdbc:mysql://100.64.11.2:3311/enterprise_digital_archives?useUnicode=true&characterEncoding=utf8&characterSetResults=utf8
username: root
password: Abc@123456
# 开发环境Redis配置
redis:
database: 2
host: 100.64.11.2
port: 6379
password: Abc123456
pool:
max-active: 200
max-wait: -1
max-idle: 10
min-idle: 0
timeout: 50000
# 文件上传配置
servlet:
multipart:
max-file-size: -1
max-request-size: -1
# Elasticsearch配置
elasticsearch:
rest:
uris: http://127.0.0.1:9200
# MyBatis配置
mybatis:
mapper-locations: classpath:mapper/*.xml
type-aliases-package: com.point.strategy.fond.bean,com.point.strategy.docSimpleArrange.bean,com.point.strategy.docTraditionArrange.docFile.bean,com.point.strategy.docTraditionArrange.docVolume.bean,com.point.strategy.bean
configuration:
map-underscore-to-camel-case: true
call-setters-on-nulls: true
##单点登录配置
#keycloak:
# credentials:
# # 客户端密钥
# secret: 03499cda-9d0d-40e4-8f96-369fb21c8812
# # 表示是一个public的client
# # public-client: true
# # keycloak的地址
# auth-server-url: https://auth.jztweb.com/auth
# # keycloak中的realm
# realm: jzt
# # client ID
# resource: danan
# # 安全约束
# securityConstraints:
# - authRoles:
# # 以下路径需要demoUser角色才能访问 '*'
# - '*'
# securityCollections:
# # name可以随便写
# - name: common guanli
# patterns:
# - /JztIndex
# 开发环境日志配置
logging:
level:
org.springframework.boot.autoconfigure: info
com.point.strategy: debug
config: classpath:logback-spring.xml
# Swagger配置
swagger:
show: true
# 档案XML生成配置
archiveXML:
generate: false
# 开发环境文件路径配置
upload:
path: /Users/ab/Desktop/tmp/data/tomcat/webapps/upload
temp:
path: /Users/ab/Desktop/tmp/data/tempPath
unzip:
path: /Users/ab/Desktop/tmp/data/unzip
img:
upload: /Users/ab/Desktop/tmp/data/upload/
report:
path: /Users/ab/Desktop/tmp/data/report/path/
# 网络上传文件临时路径配置
net:
upload:
# Windows环境路径
win:
filePath: "D:\\fileAll\\"
targetPath: "D:\\testFile"
# Linux环境路径
linux:
filePath: "/home/fileAll/"
targetPath: "/home/testFile"
upload-two:
# Windows环境路径
win:
filePath: "D:\\fileAllTwo\\"
targetPath: "D:\\testFileTwo"
# Linux环境路径
linux:
filePath: "/home/fileAllTwo/"
targetPath: "/home/testFileTwo"
jzt:
# Windows环境路径
win:
filePath: "D:\\fileAll\\"
targetPath: "D:\\testFile"
# Linux环境路径
linux:
filePath: "/opt/fileAll/"
targetPath: "/opt/testFile"
# 友虹OCR配置
youhong:
integrate: true
baseUrl: http://localhost:9000/v1/
# 湿度监控IP
humidityIp: 10.19.16.64:1433
# 上架IP
shelvingIp: 10.19.16.64:1236
# 移车系统集成配置
ycj:
driverClassName: com.mysql.jdbc.Driver
url: jdbc:mysql://10.71.102.152:3306/dagl?useUnicode=true&characterEncoding=utf8&characterSetResults=utf8
username: ezdagl
password: ezdagl#20241120
# 市公司业务数据系统集成配置
cityBusiness:
driverClassName: com.mysql.jdbc.Driver
url: jdbc:mysql://localhost:3306/citybusiness?useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&connectTimeout=5000&socketTimeout=5000
username: root
password: Abc@123456
# 温湿度监控API配置
temperature:
getAllWareHouseList: http://192.168.0.126:8080/api/getAllWareHouseList
getAirRealTimeAnd24HDataByWareHouseId: http://192.168.0.126:8080/api/getAirRealTimeAnd24HDataByWareHouseId
# OCR配置
ocr:
tessPath: ${TESS_PATH:D://install//tesseract-ocr}
#

View File

@@ -0,0 +1,135 @@
# 生产环境配置
---
server:
port: ${SERVER_PORT:9081}
servlet:
context-path: ${SERVER_CONTEXT_PATH:/point-strategy}
tomcat:
uriEncoding: UTF-8
max-threads: ${TOMCAT_MAX_THREADS:1000}
max-connections: ${TOMCAT_MAX_CONNECTIONS:20000}
spring:
# 生产环境数据源配置
datasource:
driverClassName: ${DB_DRIVER:com.mysql.cj.jdbc.Driver}
url: jdbc:mysql://${DB_HOST:mysql}:${DB_PORT:3306}/${DB_NAME:enterprise_digital_archives}?useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true
    username: ${DB_USERNAME:root}
    # NOTE(review): shipping a real-looking default password in the prod profile is risky —
    # prefer no default here so deployment fails fast when DB_PASSWORD is unset; confirm before changing
    password: ${DB_PASSWORD:Abc@123456}
# 生产环境Redis配置
redis:
database: ${REDIS_DATABASE:2}
host: ${REDIS_HOST:127.0.0.1}
port: ${REDIS_PORT:6379}
password: ${REDIS_PASSWORD:prod_redis_password}
pool:
max-active: ${REDIS_MAX_ACTIVE:200}
max-wait: ${REDIS_MAX_WAIT:-1}
max-idle: ${REDIS_MAX_IDLE:10}
min-idle: ${REDIS_MIN_IDLE:0}
timeout: ${REDIS_TIMEOUT:50000}
# 文件上传配置
servlet:
multipart:
max-file-size: ${MAX_FILE_SIZE:-1}
max-request-size: ${MAX_REQUEST_SIZE:-1}
# Elasticsearch配置
elasticsearch:
rest:
uris: ${ELASTICSEARCH_SCHEME:http}://${ELASTICSEARCH_HOST:127.0.0.1}:${ELASTICSEARCH_PORT:9200}
# MyBatis配置
mybatis:
mapper-locations: classpath:mapper/*.xml
type-aliases-package: com.point.strategy.fond.bean,com.point.strategy.docSimpleArrange.bean,com.point.strategy.docTraditionArrange.docFile.bean,com.point.strategy.docTraditionArrange.docVolume.bean,com.point.strategy.bean
configuration:
map-underscore-to-camel-case: true
call-setters-on-nulls: true
# 生产环境日志配置
logging:
level:
org.springframework.boot.autoconfigure: ${LOG_ROOT_LEVEL:warn}
com.point.strategy: ${LOG_APP_LEVEL:info}
config: classpath:logback-spring.xml
# Swagger配置
swagger:
show: ${SWAGGER_SHOW:false}
# 档案XML生成配置
archiveXML:
generate: ${ARCHIVE_XML_GENERATE:false}
# 生产环境文件路径配置Docker环境安全路径
# 注意:所有路径都不应该以斜杠结尾,避免路径拼接时出现双斜杠问题
upload:
path: ${UPLOAD_PATH:/app/data/upload}
temp:
path: ${TEMP_PATH:/app/data/temp}
unzip:
path: ${UNZIP_PATH:/app/data/unzip}
img:
upload: ${IMG_UPLOAD_PATH:/app/data/images} # 注意:不以斜杠结尾
report:
path: ${REPORT_PATH:/app/data/reports} # 注意:不以斜杠结尾
# 网络上传文件临时路径配置Docker环境安全路径
net:
upload:
# Windows环境路径
win:
      filePath: "${NET_UPLOAD_WIN_FILEPATH:D:\\fileAll\\}"
      targetPath: "${NET_UPLOAD_WIN_TARGETPATH:D:\\testFile}"
    # Linux环境路径
    linux:
      filePath: ${NET_UPLOAD_LINUX_FILEPATH:/app/data/fileAll/}
      targetPath: ${NET_UPLOAD_LINUX_TARGETPATH:/app/data/testFile}
upload-two:
# Windows环境路径
win:
      filePath: "${NET_UPLOAD_TWO_WIN_FILEPATH:D:\\fileAllTwo\\}"
      targetPath: "${NET_UPLOAD_TWO_WIN_TARGETPATH:D:\\testFileTwo}"
    # Linux环境路径
    linux:
      filePath: ${NET_UPLOAD_TWO_LINUX_FILEPATH:/app/data/fileAllTwo/}
      targetPath: ${NET_UPLOAD_TWO_LINUX_TARGETPATH:/app/data/testFileTwo}
jzt:
# Windows环境路径
win:
      filePath: "${NET_JZT_WIN_FILEPATH:D:\\fileAll\\}"
      targetPath: "${NET_JZT_WIN_TARGETPATH:D:\\testFile}"
    # Linux环境路径
    linux:
      filePath: ${NET_JZT_LINUX_FILEPATH:/app/data/fileAll/}
      targetPath: ${NET_JZT_LINUX_TARGETPATH:/app/data/testFile}
# 友虹OCR配置
youhong:
integrate: ${YOUHONG_INTEGRATE:true}
baseUrl: ${YOUHONG_BASE_URL:http://prod-ocr-server:9000/v1/}
# 湿度监控IP
humidityIp: ${HUMIDITY_IP:10.19.16.64:1433}
# 上架IP
shelvingIp: ${SHELVING_IP:10.19.16.64:1236}
# 移车系统集成配置
ycj:
driverClassName: ${YCJ_DRIVER:com.mysql.jdbc.Driver}
url: jdbc:mysql://${YCJ_HOST:prod-mysql-server}:${YCJ_PORT:3306}/${YCJ_DB_NAME:dagl}?useUnicode=true&characterEncoding=utf8&characterSetResults=utf8
username: ${YCJ_USERNAME:prod_user}
password: ${YCJ_PASSWORD:prod_password}
# 温湿度监控API配置
temperature:
getAllWareHouseList: ${TEMP_API_BASE_URL:http://192.168.0.126:8080}/api/getAllWareHouseList
getAirRealTimeAnd24HDataByWareHouseId: ${TEMP_API_BASE_URL:http://192.168.0.126:8080}/api/getAirRealTimeAnd24HDataByWareHouseId
# OCR配置Docker环境中Tesseract安装路径
ocr:
tessPath: ${TESS_PATH:/usr/bin/tesseract}

View File

@@ -1,3 +1,6 @@
# Spring Boot 应用配置
spring:
application:
name: point-strategy
profiles:
active: dev
active: dev # 默认激活dev环境可通过启动参数覆盖

View File

@@ -13,10 +13,6 @@
<!-- 文档保留总大小 -->
<property name="totalSizeCap" value="50GB"/>
<!-- name的值是变量的名称value的值是变量定义的值。通过定义的值会被插入到logger上下文中。定义后可以使“${}”来使用变量。 -->
<!-- <property name="log.path" value="/logs" /> -->
<property name="log.path" value="logs" />
<!--0. 日志格式和颜色渲染 -->
<!-- 彩色日志依赖的渲染类 -->
<conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
@@ -35,146 +31,139 @@
</encoder>
</appender>
<!--2. 输出到文档-->
<!-- 2.1 level为 DEBUG 日志,时间滚动输出 -->
<appender name="DEBUG_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 正在记录的日志文档的路径及文档名 -->
<file>${log.path}/web_debug.log</file>
<!--日志文档输出格式-->
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset> <!-- 设置字符集 -->
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 日志归档 -->
<fileNamePattern>${log.path}/web-debug-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文档保留天数-->
<maxHistory>15</maxHistory>
</rollingPolicy>
<!-- 此日志文档只记录debug级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>debug</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 生产环境:写入与 Dockerfile 保持一致的 /app/logs -->
<springProfile name="prod">
<!-- name的值是变量的名称value的值时变量定义的值。通过定义的值会被插入到logger上下文中。定义后可以使“${}”来使用变量。 -->
<property name="log.path" value="/app/logs" />
<!-- 2.2 level为 INFO 日志,时间滚动输出 -->
<appender name="INFO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 正在记录的日志文档的路径及文档名 -->
<file>${log.path}/web_info.log</file>
<!--日志文档输出格式-->
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 每天日志归档路径以及格式 -->
<fileNamePattern>${log.path}/web-info-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文档保留天数-->
<maxHistory>15</maxHistory>
</rollingPolicy>
<!-- 此日志文档只记录info级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>info</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!--2. 输出到文档-->
<!-- 2.1 level为 DEBUG 日志,时间滚动输出 -->
<appender name="DEBUG_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 正在记录的日志文档的路径及文档名 -->
<file>${log.path}/web_debug.log</file>
<!--日志文档输出格式-->
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset> <!-- 设置字符集 -->
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 日志归档 -->
<fileNamePattern>${log.path}/web-debug-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文档保留天数-->
<maxHistory>15</maxHistory>
</rollingPolicy>
<!-- 此日志文档只记录debug级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>debug</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 2.3 level为 WARN 日志,时间滚动输出 -->
<appender name="WARN_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 正在记录的日志文档的路径及文档名 -->
<file>${log.path}/web_warn.log</file>
<!--日志文档输出格式-->
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset> <!-- 此处设置字符集 -->
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/web-warn-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文档保留天数-->
<maxHistory>15</maxHistory>
</rollingPolicy>
<!-- 此日志文档只记录warn级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>warn</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 2.2 level为 INFO 日志,时间滚动输出 -->
<appender name="INFO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 正在记录的日志文档的路径及文档名 -->
<file>${log.path}/web_info.log</file>
<!--日志文档输出格式-->
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 每天日志归档路径以及格式 -->
<fileNamePattern>${log.path}/web-info-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文档保留天数-->
<maxHistory>15</maxHistory>
</rollingPolicy>
<!-- 此日志文档只记录info级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>info</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 2.4 level为 ERROR 日志,时间滚动输出 -->
<appender name="ERROR_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 正在记录的日志文档的路径及文档名 -->
<file>${log.path}/web_error.log</file>
<!--日志文档输出格式-->
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset> <!-- 此处设置字符集 -->
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/web-error-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文档保留天数-->
<maxHistory>15</maxHistory>
</rollingPolicy>
<!-- 此日志文档只记录ERROR级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 2.3 level为 WARN 日志,时间滚动输出 -->
<appender name="WARN_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 正在记录的日志文档的路径及文档名 -->
<file>${log.path}/web_warn.log</file>
<!--日志文档输出格式-->
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset> <!-- 此处设置字符集 -->
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/web-warn-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文档保留天数-->
<maxHistory>15</maxHistory>
</rollingPolicy>
<!-- 此日志文档只记录warn级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>warn</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!--
<logger>用来设置某一个包或者具体的某一个类的日志打印级别、以及指定<appender>。<logger>仅有一个name属性 一个可选的level和一个可选的addtivity属性。
name:用来指定受此logger约束的某一个包或者具体的某一个类。
level:用来设置打印级别大小写无关TRACE, DEBUG, INFO, WARN, ERROR, ALL 和 OFF
还有一个特殊值INHERITED或者同义词NULL代表强制执行上级的级别。
如果未设置此属性那么当前logger将会继承上级的级别。
addtivity:是否向上级logger传递打印信息。默认是true。
<!-- 2.4 level为 ERROR 日志,时间滚动输出 -->
<appender name="ERROR_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 正在记录的日志文档的路径及文档名 -->
<file>${log.path}/web_error.log</file>
<!--日志文档输出格式-->
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<charset>UTF-8</charset> <!-- 此处设置字符集 -->
</encoder>
<!-- 日志记录器的滚动策略,按日期,按大小记录 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/web-error-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>100MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!--日志文档保留天数-->
<maxHistory>15</maxHistory>
</rollingPolicy>
<!-- 此日志文档只记录ERROR级别的 -->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<logger name="org.springframework.web" level="info"/>
<logger name="org.springframework.scheduling.annotation.ScheduledAnnotationBeanPostProcessor" level="INFO"/>
-->
<root level="debug">
<appender-ref ref="CONSOLE" />
<appender-ref ref="DEBUG_FILE" />
<appender-ref ref="INFO_FILE" />
<appender-ref ref="WARN_FILE" />
<appender-ref ref="ERROR_FILE" />
</root>
</springProfile>
<!--
使用mybatis的时候sql语句是debug下才会打印而这里我们只配置了info所以想要查看sql语句的话有以下两种操作
第一种把<root level="info">改成<root level="DEBUG">这样就会打印sql不过这样日志那边会出现很多其他消息
第二种就是单独给dao下目录配置debug模式代码如下这样配置sql语句会打印其他还是正常info级别
<!-- 开发环境:仅输出到控制台,避免写文件 -->
<springProfile name="dev">
<root level="debug">
<appender-ref ref="CONSOLE" />
</root>
</springProfile>
【logging.level.org.mybatis=debug logging.level.dao=debug】
-->
<!-- 其他环境兜底为控制台输出 -->
<springProfile name="!prod &amp; !dev">
<root level="debug">
<appender-ref ref="CONSOLE" />
</root>
</springProfile>
<!--
root节点是必选节点用来指定最基础的日志输出级别只有一个level属性
level:用来设置打印级别大小写无关TRACE, DEBUG, INFO, WARN, ERROR, ALL 和 OFF不能设置为INHERITED或者同义词NULL。默认是DEBUG
标识这个appender将会添加到这个logger。
-->
<root level="debug">
<appender-ref ref="CONSOLE" />
<!-- <appender-ref ref="DEBUG_FILE" />-->
<!-- <appender-ref ref="INFO_FILE" />-->
<!-- <appender-ref ref="WARN_FILE" />-->
<!-- <appender-ref ref="ERROR_FILE" />-->
</root>
</configuration>
</configuration>

View File

@@ -57,10 +57,10 @@
</if>
</select>
<insert id="insert" parameterType="com.point.strategy.bean.TtableDescription" >
insert into t_table_description (table_name, table_chn_name, table_auxi_name,
insert into t_table_description (table_name, table_chn_name, table_auxi_name,
table_type, entity_id, note
)
values (#{tableName,jdbcType=VARCHAR}, #{tableChnName,jdbcType=VARCHAR}, #{tableAuxiName,jdbcType=VARCHAR},
values (#{tableName,jdbcType=VARCHAR}, #{tableChnName,jdbcType=VARCHAR}, #{tableAuxiName,jdbcType=VARCHAR},
#{tableType,jdbcType=VARCHAR}, #{entityId,jdbcType=INTEGER}, #{note,jdbcType=VARCHAR}
)
</insert>
@@ -107,15 +107,12 @@
</if>
</trim>
</insert>
<select id="selectTtableDescription" resultType="com.point.strategy.bean.TtableDescription" parameterType="java.lang.Integer" >
select
<include refid="Base_Column_List" />
from t_table_description
where 1=1
<if test="entityId != null" >
and entity_Id = #{entityId}
</if>
where entity_Id = #{entityId}
</select>
<select id="selectTtableDescOne" resultType="com.point.strategy.bean.TtableDescription" parameterType="string" >
@@ -144,7 +141,7 @@
where t1.id=#{classId}
</select>
<delete id="deleteTtableDescription" parameterType="java.lang.Integer">
delete from t_table_description
where 1=1
@@ -152,6 +149,6 @@
and entity_Id = #{entityId}
</if>
</delete>
</mapper>
</mapper>

View File

@@ -70,7 +70,7 @@
<if test="archiveNo!=null and archiveNo!='' ">
and archive_no = #{archiveNo}
</if>
</select>
<!--根据id查询-->
@@ -325,7 +325,7 @@
<select id="getDocOriginalEntityCount" parameterType="java.lang.Integer" resultType="java.lang.Integer" >
select count(*) from wsjh_20201103104220949_temp_file where rec_id = '${_parameter}' and file_status=1
</select>
<insert id="saveDocOriginalEntity" parameterType="DocOriginalEntity" >
insert into wsjh_20201103104220949_temp_file(
@@ -442,7 +442,7 @@
#{ dividedContent}
)
</insert>
<select id="queryDocOriginalEntity" parameterType="Integer" resultType="DocOriginalEntity" >
select * from wsjh_20201103104220949_temp_file where rec_id = '${_parameter}' and file_status=1
</select>
@@ -469,15 +469,28 @@
</trim>
WHERE id='${_parameter}'
</update>
<update id="updateObject" parameterType="java.util.HashMap">
UPDATE
${tableName}
set
${fieldValue}
WHERE
WHERE
${conditionSql}
</update>
<update id="updateFileName">
UPDATE ${tableName}
SET file_name = CASE
<foreach collection="list" item="item" separator="">
WHEN id = #{item.id} THEN #{item.fileName}
</foreach>
ELSE file_name
END
WHERE id IN
<foreach collection="list" item="item" open="(" separator="," close=")">
#{item.id}
</foreach>
</update>
<delete id="deleteDocOriginalEntityRecycle" parameterType="java.lang.Integer">
delete from wsjh_20201103104220949_temp_file where 1=1 and id = #{id}
@@ -507,15 +520,15 @@
<select id="selectArchiveFileNumberObject" parameterType="java.util.HashMap" resultType="java.lang.Integer" >
select archive_file_num from ${tableName} where ${conditionSql}
</select>
<delete id="deleteObject" parameterType="java.util.HashMap">
delete from ${tableName} where ${conditionSql}
</delete>
<insert id="saveObject" parameterType="java.util.HashMap" >
insert into ${tableName} (${fieldName}) values (${valueName})
</insert>
<select id="queryDocOriginalEntityById" resultType="DocOriginalEntity">
select id,
entity_id,
@@ -564,5 +577,9 @@
</where>
order by page_no asc
</select>
<select id="selectArchiveNo" resultType="java.lang.String">
select archive_no from ${tableName} where id = #{recId}
</select>
</mapper>
</mapper>

View File

@@ -15,7 +15,7 @@
<result column="counts" property="counts" jdbcType="INTEGER" />
</resultMap>
<sql id="Base_Column_List" >
id, username, user_chn_name, password, phone, email, mark_enable,dept_id,"level",
id, username, user_chn_name, password, phone, email, mark_enable,dept_id,level,
create_time,counts
</sql>
<select id="selectByPrimaryKey" resultMap="BaseResultMap" parameterType="java.lang.Integer" >
@@ -29,11 +29,11 @@
where id = #{id,jdbcType=INTEGER}
</delete>
<insert id="insert" parameterType="com.point.strategy.user.bean.User" >
insert into t_user (id, username, user_chn_name,
password, phone, email,
insert into t_user (id, username, user_chn_name,
password, phone, email,
mark_enable,create_time,counts)
values (#{id,jdbcType=INTEGER}, #{username,jdbcType=VARCHAR}, #{userChnName,jdbcType=VARCHAR},
#{password,jdbcType=VARCHAR}, #{phone,jdbcType=CHAR}, #{email,jdbcType=VARCHAR},
values (#{id,jdbcType=INTEGER}, #{username,jdbcType=VARCHAR}, #{userChnName,jdbcType=VARCHAR},
#{password,jdbcType=VARCHAR}, #{phone,jdbcType=CHAR}, #{email,jdbcType=VARCHAR},
#{markEnable,jdbcType=INTEGER},#{createTime,jdbcType=VARCHAR},#{counts,jdbcType=INTEGER})
</insert>
<insert id="insertSelective" parameterType="com.point.strategy.user.bean.User" useGeneratedKeys="true" keyProperty="id" keyColumn="id">
@@ -225,4 +225,4 @@
where userid = #{userId,jdbcType=INTEGER}
</select>
</mapper>
</mapper>

View File

@@ -1,3 +0,0 @@
This is where language files should be placed.
Please DO NOT translate these directly use this service: https://www.transifex.com/projects/p/tinymce/

View File

@@ -1,99 +0,0 @@
TinyMCE - JavaScript Library for Rich Text Editing
===================================================
Building TinyMCE
-----------------
Install [Node.js](https://nodejs.org/en/) on your system.
Clone this repository on your system
```
$ git clone https://github.com/tinymce/tinymce.git
```
Open a console and go to the project directory.
```
$ cd tinymce/
```
Install `grunt` command line tool globally.
```
$ npm i -g grunt-cli
```
Install all package dependencies.
```
$ npm install
```
Now, build TinyMCE by using `grunt`.
```
$ grunt
```
Build tasks
------------
`grunt`
Lints, compiles, minifies and creates release packages for TinyMCE. This will produce the production ready packages.
`grunt start`
Starts a webpack-dev-server that compiles the core, themes, plugins and all demos. Go to `localhost:3000` for a list of links to all the demo pages.
`grunt dev`
Runs tsc, webpack and less. This will only produce the bare essentials for a development build and is a lot faster.
`grunt test`
Runs all tests on PhantomJS.
`grunt bedrock-manual`
Runs all tests manually in a browser.
`grunt bedrock-auto:<browser>`
Runs all tests through selenium browsers supported are chrome, firefox, ie, MicrosoftEdge, chrome-headless and phantomjs.
`grunt webpack:core`
Builds the demo js files for the core part of tinymce this is required to get the core demos working.
`grunt webpack:plugins`
Builds the demo js files for the plugins part of tinymce this is required to get the plugins demos working.
`grunt webpack:themes`
Builds the demo js files for the themes part of tinymce this is required to get the themes demos working.
`grunt webpack:<name>-plugin`
Builds the demo js files for the specific plugin.
`grunt webpack:<name>-theme`
Builds the demo js files for the specific theme.
`grunt --help`
Displays the various build tasks.
Bundle themes and plugins into a single file
---------------------------------------------
`grunt bundle --themes=modern --plugins=table,paste`
Minifies the core, adds the modern theme and adds the table and paste plugin into tinymce.min.js.
Contributing to the TinyMCE project
------------------------------------
TinyMCE is an open source software project and we encourage developers to contribute patches and code to be included in the main package of TinyMCE.
__Basic Rules__
* Contributed code will be licensed under the LGPL license but not limited to LGPL
* Copyright notices will be changed to Ephox Corporation, contributors will get credit for their work
* All third party code will be reviewed, tested and possibly modified before being released
* All contributors will have to have signed the Contributor License Agreement
These basic rules ensures that the contributed code remains open source and under the LGPL license.
__How to Contribute to the Code__
The TinyMCE source code is [hosted on Github](https://github.com/tinymce/tinymce). Through Github you can submit pull requests and log new bugs and feature requests.
When you submit a pull request, you will get a notice about signing the __Contributors License Agreement (CLA)__.
You should have a __valid email address on your GitHub account__, and you will be sent a key to verify your identity and digitally sign the agreement.
After you signed your pull request will automatically be ready for review & merge.
__How to Contribute to the Docs__
Docs are hosted on Github in the [tinymce-docs](https://github.com/tinymce/tinymce-docs) repo.
[How to contribute](https://www.tinymce.com/docs/advanced/contributing-docs/) to the docs, including a style guide, can be found on the TinyMCE website.

View File

@@ -1,3 +0,0 @@
This is where language files should be placed.
Please DO NOT translate these directly use this service: https://www.transifex.com/projects/p/tinymce/

View File

@@ -1 +0,0 @@
Icons are generated and provided by the http://icomoon.io service.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 111 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 111 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 127 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 70 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 197 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 192 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 13 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 62 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 412 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 27 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 112 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 285 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 31 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 189 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 111 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 24 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 27 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 164 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 486 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 68 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 124 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 110 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 148 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 332 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 91 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 93 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 91 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 203 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 24 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 24 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 354 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 212 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 201 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 24 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 12 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 59 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 38 KiB

Some files were not shown because too many files have changed in this diff Show More