Browse Source

添加 search 模块, 提供查询服务

reghao 1 year ago
parent
commit
40f04f2fca
45 changed files with 3329 additions and 5 deletions
  1. 1 0
      README.md
  2. 2 2
      content/content-service/pom.xml
  3. 2 2
      content/content-service/src/main/java/cn/reghao/tnb/content/app/vod/service/impl/SearchServiceImpl.java
  4. 1 1
      content/content-service/src/test/java/cn/reghao/tnb/content/app/vod/service/SearchTest.java
  5. 1 0
      pom.xml
  6. 24 0
      search/pom.xml
  7. 21 0
      search/search-api/pom.xml
  8. 32 0
      search/search-api/src/main/java/cn/reghao/tnb/search/api/dto/VideoSummary.java
  9. 15 0
      search/search-api/src/main/java/cn/reghao/tnb/search/api/iface/DataSearchService.java
  10. 7 0
      search/search-service/Dockerfile
  11. 185 0
      search/search-service/pom.xml
  12. 34 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/EsSearch.java
  13. 205 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/LoggingService.java
  14. 15 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/SearchApplication.java
  15. 21 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/config/ElasticProperties.java
  16. 41 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/config/MyLuceneAnalysisConfigurer.java
  17. 56 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/controller/SearchController.java
  18. 141 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/es/DocumentService.java
  19. 77 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/es/ElasticService.java
  20. 129 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/es/IndexService.java
  21. 81 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/es/MappingService.java
  22. 207 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/es/QueryService.java
  23. 434 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/es/SearchService.java
  24. 208 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/ip/IPLocation.java
  25. 23 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/ip/IpTool.java
  26. 19 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/ip/Location.java
  27. 60 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/lucene/LuceneDocument.java
  28. 160 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/lucene/LuceneIndex.java
  29. 323 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/lucene/LuceneQuery.java
  30. 33 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/model/po/NginxLog.java
  31. 35 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/model/po/VideoText.java
  32. 42 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/model/po/Wenshu.java
  33. 17 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/model/vo/SearchResult.java
  34. 42 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/model/vo/VideoCard.java
  35. 19 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/model/vo/VideoProjection.java
  36. 130 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/rpc/DataSearchServiceImpl.java
  37. 38 0
      search/search-service/src/main/java/cn/reghao/tnb/search/app/util/ClassUtil.java
  38. 21 0
      search/search-service/src/main/resources/application-cluster.yml
  39. 21 0
      search/search-service/src/main/resources/application-dev.yml
  40. 21 0
      search/search-service/src/main/resources/application-test.yml
  41. 38 0
      search/search-service/src/main/resources/application.yml
  42. 74 0
      search/search-service/src/main/resources/logback-spring.xml
  43. 270 0
      search/search-service/src/test/java/SearchTest.java
  44. 1 0
      zzz/build_jar.sh
  45. 2 0
      zzz/install_tnb.sh

+ 1 - 0
README.md

@@ -26,6 +26,7 @@ tnb 项目模块:
 - file:6004
 - file:6004
 - content:6005
 - content:6005
 - data:6006
 - data:6006
+- search:6007
 
 
 ## 构建部署
 ## 构建部署
 ### pull 项目源码
 ### pull 项目源码

+ 2 - 2
content/content-service/pom.xml

@@ -34,8 +34,8 @@
             <version>1.0.0-SNAPSHOT</version>
             <version>1.0.0-SNAPSHOT</version>
         </dependency>
         </dependency>
         <dependency>
         <dependency>
-            <groupId>cn.reghao.bnt</groupId>
-            <artifactId>admin-api</artifactId>
+            <groupId>cn.reghao.tnb.search</groupId>
+            <artifactId>search-api</artifactId>
             <version>1.0.0-SNAPSHOT</version>
             <version>1.0.0-SNAPSHOT</version>
         </dependency>
         </dependency>
         <dependency>
         <dependency>

+ 2 - 2
content/content-service/src/main/java/cn/reghao/tnb/content/app/vod/service/impl/SearchServiceImpl.java

@@ -1,7 +1,5 @@
 package cn.reghao.tnb.content.app.vod.service.impl;
 package cn.reghao.tnb.content.app.vod.service.impl;
 
 
-import cn.reghao.bnt.admin.api.dto.VideoSummary;
-import cn.reghao.bnt.admin.api.iface.DataSearchService;
 import cn.reghao.jutil.jdk.db.Page;
 import cn.reghao.jutil.jdk.db.Page;
 import cn.reghao.jutil.jdk.db.PageList;
 import cn.reghao.jutil.jdk.db.PageList;
 import cn.reghao.tnb.common.auth.UserContext;
 import cn.reghao.tnb.common.auth.UserContext;
@@ -17,6 +15,8 @@ import cn.reghao.tnb.content.app.vod.service.ContentPermission;
 import cn.reghao.tnb.content.app.vod.service.SearchService;
 import cn.reghao.tnb.content.app.vod.service.SearchService;
 import cn.reghao.tnb.content.app.vod.service.VideoPostQuery;
 import cn.reghao.tnb.content.app.vod.service.VideoPostQuery;
 import cn.reghao.tnb.content.app.util.redis.ds.RedisString;
 import cn.reghao.tnb.content.app.util.redis.ds.RedisString;
+import cn.reghao.tnb.search.api.dto.VideoSummary;
+import cn.reghao.tnb.search.api.iface.DataSearchService;
 import cn.reghao.tnb.user.api.iface.UserService;
 import cn.reghao.tnb.user.api.iface.UserService;
 import org.apache.dubbo.config.annotation.DubboReference;
 import org.apache.dubbo.config.annotation.DubboReference;
 import org.springframework.stereotype.Service;
 import org.springframework.stereotype.Service;

+ 1 - 1
content/content-service/src/test/java/cn/reghao/tnb/content/app/vod/service/SearchTest.java

@@ -1,9 +1,9 @@
 package cn.reghao.tnb.content.app.vod.service;
 package cn.reghao.tnb.content.app.vod.service;
 
 
-import cn.reghao.bnt.admin.api.iface.DataSearchService;
 import cn.reghao.jutil.jdk.db.PageList;
 import cn.reghao.jutil.jdk.db.PageList;
 import cn.reghao.tnb.content.api.dto.VideoCard;
 import cn.reghao.tnb.content.api.dto.VideoCard;
 import cn.reghao.tnb.content.app.ContentApplication;
 import cn.reghao.tnb.content.app.ContentApplication;
+import cn.reghao.tnb.search.api.iface.DataSearchService;
 import lombok.extern.slf4j.Slf4j;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.dubbo.config.annotation.DubboReference;
 import org.apache.dubbo.config.annotation.DubboReference;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.Test;

+ 1 - 0
pom.xml

@@ -19,6 +19,7 @@
         <module>user</module>
         <module>user</module>
         <module>data</module>
         <module>data</module>
         <module>eureka</module>
         <module>eureka</module>
+        <module>search</module>
     </modules>
     </modules>
 
 
     <properties>
     <properties>

+ 24 - 0
search/pom.xml

@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>tnb</artifactId>
+        <groupId>cn.reghao.tnb</groupId>
+        <version>1.0.0</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>search</artifactId>
+    <packaging>pom</packaging>
+    <modules>
+        <module>search-api</module>
+        <module>search-service</module>
+    </modules>
+
+    <properties>
+        <maven.compiler.source>11</maven.compiler.source>
+        <maven.compiler.target>11</maven.compiler.target>
+    </properties>
+
+</project>

+ 21 - 0
search/search-api/pom.xml

@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>search</artifactId>
+        <groupId>cn.reghao.tnb</groupId>
+        <version>1.0.0</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <groupId>cn.reghao.tnb.search</groupId>
+    <artifactId>search-api</artifactId>
+    <version>1.0.0-SNAPSHOT</version>
+
+    <properties>
+        <maven.compiler.source>11</maven.compiler.source>
+        <maven.compiler.target>11</maven.compiler.target>
+    </properties>
+
+</project>

+ 32 - 0
search/search-api/src/main/java/cn/reghao/tnb/search/api/dto/VideoSummary.java

@@ -0,0 +1,32 @@
+package cn.reghao.tnb.search.api.dto;
+
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+import lombok.Setter;
+
+import java.io.Serializable;
+
+/**
+ * Summary projection of a video: the document stored in the search index and
+ * the DTO returned by {@code DataSearchService}. Serializable for RPC transport.
+ *
+ * @author reghao
+ * @date 2025-03-27 13:58:11
+ */
+@AllArgsConstructor
+@NoArgsConstructor
+@Setter
+@Getter
+public class VideoSummary implements Serializable {
+    private static final long serialVersionUID = 1L;
+
+    private String videoId;      // business id of the video (string form)
+    private String title;
+    private String description;
+    private Boolean vip;         // whether the video is restricted to VIP users
+
+    // Convenience constructor without an explicit description.
+    public VideoSummary(String videoId, String title, Boolean vip) {
+        this.videoId = videoId;
+        this.title = title;
+        // NOTE(review): description is defaulted to the title here — confirm this
+        // fallback is intended rather than a copy-paste slip (null may be meant).
+        this.description = title;
+        this.vip = vip;
+    }
+}

+ 15 - 0
search/search-api/src/main/java/cn/reghao/tnb/search/api/iface/DataSearchService.java

@@ -0,0 +1,15 @@
+package cn.reghao.tnb.search.api.iface;
+
+import cn.reghao.tnb.search.api.dto.VideoSummary;
+import cn.reghao.jutil.jdk.db.PageList;
+
+/**
+ * Search-service interface for maintaining and querying the video search index
+ * (consumed via {@code @DubboReference} in content-service).
+ *
+ * @author reghao
+ * @date 2025-03-27 13:45:23
+ */
+public interface DataSearchService {
+    /** Index a new video summary document. */
+    void addVideoSummary(VideoSummary videoSummary);
+    /** Re-index an existing video summary (matched by its videoId). */
+    void updateVideoSummary(VideoSummary videoSummary);
+    /** Remove the document for the given video id from the index. */
+    void deleteVideoSummary(String videoId);
+    /**
+     * Keyword search over indexed videos, returning one page of results.
+     * NOTE(review): the interplay of nextIdStr (cursor?) and pageNumber is not
+     * visible here — confirm the contract against the implementation.
+     */
+    PageList<VideoSummary> searchVideo(String keyword, String nextIdStr, int pageNumber);
+}

+ 7 - 0
search/search-service/Dockerfile

@@ -0,0 +1,7 @@
+FROM adoptopenjdk/openjdk11:x86_64-alpine-jre-11.0.15_10
+
+WORKDIR /app
+RUN ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime && echo 'Asia/Shanghai' >/etc/timezone
+COPY target/tnb-search.jar /app/tnb-search.jar
+
+ENTRYPOINT ["java","-Djava.security.egd=file:/dev/./urandom","-jar","/app/tnb-search.jar"]

+ 185 - 0
search/search-service/pom.xml

@@ -0,0 +1,185 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>search</artifactId>
+        <groupId>cn.reghao.tnb</groupId>
+        <version>1.0.0</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <groupId>cn.reghao.tnb.search</groupId>
+    <artifactId>search-service</artifactId>
+
+    <properties>
+        <maven.compiler.source>11</maven.compiler.source>
+        <maven.compiler.target>11</maven.compiler.target>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>cn.reghao.tnb.search</groupId>
+            <artifactId>search-api</artifactId>
+            <version>1.0.0-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>cn.reghao.tnb.content</groupId>
+            <artifactId>content-api</artifactId>
+            <version>1.0.0-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>cn.reghao.tnb.user</groupId>
+            <artifactId>user-api</artifactId>
+            <version>1.0.0-SNAPSHOT</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-web</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>mysql</groupId>
+            <artifactId>mysql-connector-java</artifactId>
+            <version>8.0.17</version>
+        </dependency>
+        <dependency>
+            <groupId>com.zaxxer</groupId>
+            <artifactId>HikariCP</artifactId>
+            <version>3.3.1</version>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-data-jpa</artifactId>
+        </dependency>
+
+        <!-- lucene -->
+        <dependency>
+            <groupId>org.hibernate.search</groupId>
+            <artifactId>hibernate-search-mapper-orm</artifactId>
+            <version>6.2.4.Final</version>
+        </dependency>
+        <dependency>
+            <groupId>org.hibernate.search</groupId>
+            <artifactId>hibernate-search-backend-lucene</artifactId>
+            <version>6.2.4.Final</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-core</artifactId>
+            <version>8.11.2</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-queryparser</artifactId>
+            <version>8.11.2</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-highlighter</artifactId>
+            <version>8.11.2</version>
+        </dependency>
+        <dependency>
+            <groupId>com.github.magese</groupId>
+            <artifactId>ik-analyzer</artifactId>
+            <version>8.5.0</version>
+        </dependency>
+
+        <!-- elasticsearch -->
+        <dependency>
+            <groupId>co.elastic.clients</groupId>
+            <artifactId>elasticsearch-java</artifactId>
+            <version>7.17.18</version>
+        </dependency>
+        <dependency>
+            <groupId>org.elasticsearch.client</groupId>
+            <artifactId>elasticsearch-rest-client</artifactId>
+            <version>7.17.18</version>
+        </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+            <version>2.17.0</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>com.fasterxml.jackson.core</groupId>
+                    <artifactId>jackson-core</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-core</artifactId>
+            <version>2.17.0</version>
+        </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-annotations</artifactId>
+            <version>2.17.0</version>
+        </dependency>
+        <dependency>
+            <groupId>jakarta.json</groupId>
+            <artifactId>jakarta.json-api</artifactId>
+            <version>2.0.1</version>
+        </dependency>
+
+        <dependency>
+            <groupId>io.springfox</groupId>
+            <artifactId>springfox-boot-starter</artifactId>
+            <version>3.0.0</version>
+        </dependency>
+    </dependencies>
+
+    <profiles>
+        <profile>
+            <id>dev</id>
+            <properties>
+                <profile.active>dev</profile.active>
+            </properties>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+            </activation>
+        </profile>
+        <profile>
+            <id>test</id>
+            <properties>
+                <profile.active>test</profile.active>
+            </properties>
+        </profile>
+        <profile>
+            <id>cluster</id>
+            <properties>
+                <profile.active>cluster</profile.active>
+            </properties>
+        </profile>
+    </profiles>
+
+    <build>
+        <finalName>tnb-search</finalName>
+        <resources>
+            <resource>
+                <directory>src/main/resources</directory>
+                <filtering>true</filtering>
+                <includes>
+                    <!--<include>bootstrap.yml</include>-->
+                    <include>application.yml</include>
+                    <include>application-${profile.active}.yml</include>
+                    <include>mapper/**</include>
+                    <include>*.xml</include>
+                </includes>
+            </resource>
+        </resources>
+
+        <plugins>
+            <plugin>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-maven-plugin</artifactId>
+                <configuration>
+                    <executable>true</executable>
+                    <fork>true</fork>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+</project>

+ 34 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/EsSearch.java

@@ -0,0 +1,34 @@
+package cn.reghao.tnb.search.app;
+
+import cn.reghao.tnb.search.app.es.ElasticService;
+import cn.reghao.tnb.search.app.es.QueryService;
+import cn.reghao.tnb.search.app.model.po.Wenshu;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.Pageable;
+import org.springframework.stereotype.Service;
+
+/**
+ * Facade over {@link QueryService} for full-text search on the "wenshu"
+ * (court judgment) Elasticsearch index.
+ *
+ * @author reghao
+ * @date 2025-03-17 09:49:48
+ */
+@Service
+public class EsSearch {
+    /** Name of the Elasticsearch index holding Wenshu documents. */
+    private static final String INDEX = "wenshu";
+
+    private final QueryService<Wenshu> queryService;
+
+    public EsSearch(ElasticService elasticService) {
+        this.queryService = new QueryService<>(elasticService);
+    }
+
+    /**
+     * Keyword search with highlighting.
+     * Note: QueryService takes a 1-based page number while Pageable is 0-based.
+     */
+    public Page<Wenshu> search(String keyword, Pageable pageable) {
+        return queryService.queryWithHighlight(INDEX, keyword,
+                pageable.getPageNumber() + 1, pageable.getPageSize(), Wenshu.class);
+    }
+
+    /** Fetch a single document by id; delegates absence handling to QueryService. */
+    public Wenshu getById(String id) {
+        return queryService.queryById(Wenshu.class, INDEX, id);
+    }
+}

+ 205 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/LoggingService.java

@@ -0,0 +1,205 @@
+package cn.reghao.tnb.search.app;
+
+import ch.qos.logback.classic.Level;
+import ch.qos.logback.classic.Logger;
+import ch.qos.logback.classic.LoggerContext;
+import cn.reghao.tnb.search.app.config.ElasticProperties;
+import cn.reghao.tnb.search.app.es.*;
+import cn.reghao.tnb.search.app.model.po.NginxLog;
+import cn.reghao.jutil.jdk.converter.DateTimeConverter;
+import cn.reghao.jutil.jdk.serializer.JsonConverter;
+import cn.reghao.jutil.jdk.text.TextFile;
+import cn.reghao.jutil.tool.id.SnowFlake;
+import co.elastic.clients.elasticsearch._types.mapping.Property;
+import lombok.extern.slf4j.Slf4j;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+import java.text.ParseException;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.format.DateTimeFormatter;
+import java.util.*;
+
+/**
+ * @author reghao
+ * @date 2023-11-08 10:08:02
+ */
+@Slf4j
+@Service
+public class LoggingService {
+    static SnowFlake idGenerator = new SnowFlake(1, 1);
+
+    public List getChartData() throws ParseException {
+        TextFile textFile = new TextFile();
+        String filePath = "nginx.log";
+        List<String> list = textFile.read(filePath);
+
+        List<NginxLog> nginxLogs = new ArrayList<>();
+        for (String line : list) {
+            if (!line.startsWith("{")) {
+                continue;
+            }
+
+            try {
+                NginxLog nginxLog = JsonConverter.jsonToObject(line, NginxLog.class);
+                nginxLogs.add(nginxLog);
+            } catch (Exception e) {
+                // e.printStackTrace();
+            }
+        }
+
+        Map<Long, Integer> map = new TreeMap<>();
+        for (NginxLog nginxLog : nginxLogs) {
+            String date = nginxLog.getTimeIso8601();
+            DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss+08:00")
+                    .withZone(ZoneId.of("UTC"));
+            LocalDateTime localDateTime = LocalDateTime.parse(date, formatter);
+            Long timestamp = localDateTime.toEpochSecond(ZoneOffset.of("+8"));
+            Long key = timestamp;
+
+            Integer count = map.get(key);
+            if (count == null) {
+                map.put(key, 1);
+            } else {
+                int count1 = map.get(key) + 1;
+                map.put(key, count1);
+            }
+        }
+
+        LocalDateTime localDateTime = LocalDateTime.now();
+        Long baseKey = localDateTime.toEpochSecond(ZoneOffset.of("+8"));
+
+        List<String> xList = new ArrayList<>();
+        List<Integer> yList = new ArrayList<>();
+        Set<Long> keys = new HashSet<>();
+        for (Long key : map.keySet()) {
+            if (key < baseKey) {
+                //xList.add(DateTimeConverter.format(key*1000).split(" ")[1]);
+                xList.add(DateTimeConverter.format(key*1000));
+                yList.add(map.get(key));
+                keys.add(key);
+            }
+        }
+
+        keys.forEach(map::remove);
+        keys.clear();
+        List results = new ArrayList();
+        results.add(xList.toArray());
+        results.add(yList.toArray());
+        return results;
+    }
+
+    public static void readFileFileChannel(String filePath, String index, DocumentService documentService) {
+        List<String> lines = new ArrayList<>();
+        List<NginxLog> nginxLogs = new ArrayList<>();
+        File file = new File(filePath);
+        try {
+            FileInputStream fis = new FileInputStream(file);
+            FileChannel fileChannel = fis.getChannel();
+
+            // 10MB
+            int capacity = 10*1024*1024;
+            ByteBuffer byteBuffer = ByteBuffer.allocate(capacity);
+            StringBuffer buffer = new StringBuffer();
+            while(fileChannel.read(byteBuffer) != -1) {
+                //读取后,将位置置为0,将limit置为容量, 以备下次读入到字节缓冲中,从0开始存储
+                byteBuffer.clear();
+                byte[] bytes = byteBuffer.array();
+
+                String str = new String(bytes);
+                buffer.append(str);
+                String[] strArray = buffer.toString().split(System.lineSeparator());
+                for (int i = 0; i < strArray.length-1; i++) {
+                    try {
+                        NginxLog nginxLog = JsonConverter.jsonToObject(strArray[i], NginxLog.class);
+                        nginxLogs.add(nginxLog);
+                    } catch (Exception e) {
+                        lines.add(strArray[i]);
+                        //e.printStackTrace();
+                    }
+                }
+
+                String lastLine = strArray[strArray.length-1];
+                if (!lastLine.endsWith("}")) {
+                    buffer = new StringBuffer();
+                    buffer.append(strArray[strArray.length-1]);
+                }
+
+                while (nginxLogs.size() > 10_000) {
+                    nginxLogs.forEach(nginxLog -> nginxLog.setId(idGenerator.nextId()+""));
+
+                    //NginxLog nginxLog = nginxLogs.get(0);
+                    //documentService.update(index, nginxLog);
+
+                    documentService.batchAddDocument(index, nginxLogs);
+                    log.info("save {} nginxLogs", nginxLogs.size());
+                    nginxLogs.clear();
+                }
+            }
+        } catch (IOException e) {
+            e.printStackTrace();
+        } finally {
+            // TODO close 处理
+        }
+    }
+
+    static void setLogLevel() {
+        LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory();
+        Logger rootLogger = loggerContext.getLogger("ROOT");
+        rootLogger.setLevel(Level.INFO);
+    }
+
+    public static void main(String[] args) throws Exception {
+        setLogLevel();
+
+        ElasticProperties elasticProperties = new ElasticProperties();
+        ElasticService elasticService = new ElasticService(elasticProperties);
+        IndexService indexService = new IndexService(elasticService);
+        MappingService mappingService = new MappingService();
+        DocumentService documentService = new DocumentService(elasticService);
+        SearchService searchService = new SearchService(elasticService);
+        QueryService<NginxLog> queryService = new QueryService<>(elasticService);
+        String index = "nginx_log";
+
+        //indexService.getIndex(index);
+        //indexService.getMapping(index);
+
+        String filePath = "/home/reghao/work/azy/data/access-20231107_073356-20240905_165944.log";
+        filePath = "/home/reghao/work/azy/data/access-20240905_165948-20250306_170553.log";
+        //readFileFileChannel(filePath, index, documentService);
+
+        //documentService.deleteAllDocument(index);
+        Map<String, Property> propertyMap = mappingService.getPropertyMap(NginxLog.class);
+        //indexService.deleteIndex(index);
+        //indexService.createIndex(index, propertyMap);
+
+        //searchService.search(index);
+//        search1(esClient, "app_log");
+//        index = "app_log";
+//        deleteAll(index);
+//        searchService.aggregate(index);
+
+        int pn = 1;
+        /*while (pn < 100) {
+            List<NginxLog> list = searchService.searchByPage(index, pn, "", "");
+            System.out.println();
+            pn++;
+        }*/
+
+        //searchService.searchAll(index);
+//        indexService.updateMapping(index);
+
+        //String queryString = "content";
+        //List<NginxLog> list = queryService.queryWithHighlight(index, queryString, pn, ps, NginxLog.class);
+        //searchService.searchAll(index);
+        searchService.aggregate(index);
+        //searchService.count(index);
+    }
+}

+ 15 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/SearchApplication.java

@@ -0,0 +1,15 @@
+package cn.reghao.tnb.search.app;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+
+/**
+ * Spring Boot entry point for the tnb search service.
+ *
+ * @author reghao
+ * @date 2025-04-28 09:50:58
+ */
+@SpringBootApplication
+public class SearchApplication {
+    public static void main(String[] args) {
+        SpringApplication.run(SearchApplication.class, args);
+    }
+}

+ 21 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/config/ElasticProperties.java

@@ -0,0 +1,21 @@
+package cn.reghao.tnb.search.app.config;
+
+import lombok.Getter;
+import lombok.Setter;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Configuration;
+
+/**
+ * Elasticsearch connection settings, bound from the "es.*" keys
+ * (es.host, es.port, es.username, es.password) in the application*.yml files.
+ *
+ * @author reghao
+ * @date 2025-03-26 10:14:02
+ */
+@Getter
+@Setter
+@Configuration
+@ConfigurationProperties(prefix = "es")
+public class ElasticProperties {
+    private String host;       // ES node host name or IP
+    private int port;          // ES HTTP port
+    private String username;   // basic-auth user (blank if security disabled)
+    private String password;
+}

+ 41 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/config/MyLuceneAnalysisConfigurer.java

@@ -0,0 +1,41 @@
+package cn.reghao.tnb.search.app.config;
+
+import org.apache.lucene.analysis.charfilter.HTMLStripCharFilterFactory;
+import org.apache.lucene.analysis.core.LowerCaseFilterFactory;
+import org.apache.lucene.analysis.miscellaneous.ASCIIFoldingFilterFactory;
+import org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory;
+import org.apache.lucene.analysis.standard.StandardTokenizerFactory;
+import org.hibernate.search.backend.lucene.analysis.LuceneAnalysisConfigurationContext;
+import org.hibernate.search.backend.lucene.analysis.LuceneAnalysisConfigurer;
+import org.wltea.analyzer.lucene.IKTokenizerFactory;
+
+/**
+ * Hibernate Search (Lucene backend) analysis configuration.
+ *
+ * Registers:
+ *  - "ikAnalyzer": IK tokenizer (Chinese segmentation) plus HTML stripping,
+ *    lower-casing, Snowball stemming and ASCII folding;
+ *  - "english": standard tokenizer with the same filter chain and an explicit
+ *    English Snowball stemmer;
+ *  - "lowercase": normalizer for keyword-style / sortable fields.
+ *
+ * (The unused {@code new IKAnalyzer()} instance previously created here was
+ * removed — Hibernate Search only uses the tokenizer factory registered below.)
+ *
+ * @author reghao
+ * @date 2025-03-20 11:13:33
+ */
+public class MyLuceneAnalysisConfigurer implements LuceneAnalysisConfigurer {
+    @Override
+    public void configure(LuceneAnalysisConfigurationContext context) {
+        context.analyzer("ikAnalyzer").custom()
+                .tokenizer(IKTokenizerFactory.class)
+                .charFilter(HTMLStripCharFilterFactory.class)
+                .tokenFilter(LowerCaseFilterFactory.class)
+                .tokenFilter(SnowballPorterFilterFactory.class)
+                .tokenFilter(ASCIIFoldingFilterFactory.class);
+
+        context.analyzer("english").custom()
+                .tokenizer(StandardTokenizerFactory.class)
+                .charFilter(HTMLStripCharFilterFactory.class)
+                .tokenFilter(LowerCaseFilterFactory.class)
+                .tokenFilter(SnowballPorterFilterFactory.class)
+                // param applies to the last-added component (the Snowball stemmer)
+                .param("language", "English")
+                .tokenFilter(ASCIIFoldingFilterFactory.class);
+
+        context.normalizer("lowercase").custom()
+                .tokenFilter(LowerCaseFilterFactory.class)
+                .tokenFilter(ASCIIFoldingFilterFactory.class);
+    }
+}

+ 56 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/controller/SearchController.java

@@ -0,0 +1,56 @@
+package cn.reghao.tnb.search.app.controller;
+
+import cn.reghao.tnb.search.app.EsSearch;
+import cn.reghao.tnb.search.app.model.po.Wenshu;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.Sort;
+import org.springframework.stereotype.Controller;
+import org.springframework.ui.ModelMap;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.RequestParam;
+
+/**
+ * MVC controller for the "wenshu" (court judgment) search and detail pages.
+ *
+ * @author reghao
+ * @date 2025-05-02 16:44:46
+ */
+@Api(tags = "博客前台页面")
+@Controller
+public class SearchController {
+    private final EsSearch esSearch;
+
+    // Constructor injection: the field was previously declared but never wired,
+    // so every request would have hit a NullPointerException.
+    public SearchController(EsSearch esSearch) {
+        this.esSearch = esSearch;
+    }
+
+    @ApiOperation(value = "裁判文书搜索页面", notes = "N")
+    @GetMapping("/wenshu")
+    public String wenshuSearchPage(@RequestParam(value = "search", required = false) String search,
+                                   @RequestParam(value = "searchType", required = false) String searchType,
+                                   ModelMap model) {
+        Pageable pageable = PageRequest.of(0, 10, Sort.by(Sort.Direction.ASC, "createTime"));
+        // no keyword -> render the page with an empty result set instead of querying ES
+        Page<Wenshu> page = (search == null) ? Page.empty() : esSearch.search(search, pageable);
+
+        model.put("page", page);
+        model.put("search", search);
+        model.put("searchType", searchType == null ? "" : searchType);
+        return "/classic/wenshu/wenshu_search";
+    }
+
+    @ApiOperation(value = "裁判文书详情页面", notes = "N")
+    @GetMapping("/wenshu/{id}")
+    public String wenshuPage(@PathVariable("id") String id, ModelMap model) {
+        model.put("wenshu", esSearch.getById(id));
+        return "/classic/wenshu/wenshu";
+    }
+}

+ 141 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/es/DocumentService.java

@@ -0,0 +1,141 @@
+package cn.reghao.tnb.search.app.es;
+
+import cn.reghao.tnb.search.app.model.po.NginxLog;
+import cn.reghao.tnb.search.app.model.po.VideoText;
+import cn.reghao.tnb.search.app.model.po.Wenshu;
+import cn.reghao.jutil.tool.id.SnowFlake;
+import co.elastic.clients.elasticsearch.ElasticsearchAsyncClient;
+import co.elastic.clients.elasticsearch.ElasticsearchClient;
+import co.elastic.clients.elasticsearch._types.Result;
+import co.elastic.clients.elasticsearch._types.query_dsl.MatchAllQuery;
+import co.elastic.clients.elasticsearch.core.*;
+import co.elastic.clients.elasticsearch.core.bulk.BulkOperation;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Service;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * @author reghao
+ * @date 2025-03-12 10:45:58
+ */
+@Slf4j
+@Service
+public class DocumentService {
+    private final ElasticsearchClient esClient;
+    private final SnowFlake idGenerator;
+
+    public DocumentService(ElasticService elasticService) {
+        this.esClient = elasticService.getElasticsearchClient();
+        this.idGenerator = new SnowFlake(1, 1);
+    }
+
+    public void addDocument(String indexName, NginxLog product) throws IOException {
+        IndexResponse indexResponse = esClient.index(i -> i.index(indexName).id(product.getId()).document(product));
+        log.info("add one document result: {}", indexResponse.result().jsonValue());
+    }
+
+    public void batchAddDocument(String indexName, List<NginxLog> nginxLogs) throws IOException {
+        List<BulkOperation> bulkOperations = new ArrayList<>();
+        nginxLogs.forEach(p -> bulkOperations.add(BulkOperation.of(b -> b.index(c -> c.id(p.getId()).document(p)))));
+        BulkResponse bulkResponse = esClient.bulk(s -> s.index(indexName).operations(bulkOperations));
+        //bulkResponse.items().forEach(b -> log.info("bulk response result = {}", b.result()));
+        //log.error("bulk response.error() = {}", bulkResponse.errors());
+    }
+
+    public void batchAddDocument1(String indexName, List<Wenshu> wenshuList) throws IOException {
+        List<BulkOperation> bulkOperations = new ArrayList<>();
+        wenshuList.forEach(p -> bulkOperations.add(BulkOperation.of(b -> b.index(c -> c.id(p.getId()).document(p)))));
+        BulkResponse bulkResponse = esClient.bulk(s -> s.index(indexName).operations(bulkOperations));
+//        bulkResponse.items().forEach(b -> log.info("bulk response result = {}", b.result()));
+        //log.error("bulk response.error() = {}", bulkResponse.errors());
+    }
+
+    public void saveAll(String index, List<NginxLog> nginxLogs) throws IOException {
+        for (NginxLog nginxLog : nginxLogs) {
+            IndexResponse response = esClient.index(i -> i.index(index).document(nginxLog).id("" + idGenerator.nextId()));
+            Result result = response.result();
+            System.out.println(result.jsonValue());
+        }
+    }
+
+    public void updateDocument(String indexName, NginxLog product) throws IOException {
+        UpdateResponse<NginxLog> updateResponse = esClient.update(s -> s.index(indexName).id(product.getId()).doc(product), NginxLog.class);
+        log.info("update doc result: {}", updateResponse.result());
+    }
+
+    public void update(String index, NginxLog nginxLog) throws IOException {
+        String id = nginxLog.getId();
+        IndexResponse response = esClient.index(i -> i.index(index).document(nginxLog).id(id));
+        Result result = response.result();
+        System.out.println(result.jsonValue());
+    }
+
+    public void deleteDocument(String indexName, String id) {
+        try {
+            DeleteResponse deleteResponse = esClient.delete(s -> s.index(indexName).id(id));
+            log.info("del doc result: {}", deleteResponse.result());
+        } catch (IOException e) {
+            log.error("del doc failed, error: ", e);
+        }
+    }
+
+    /**
+     * 删除索引下的所有文档
+     *
+     * @param
+     * @return
+     * @date 2025-03-13 16:26:20
+     */
+    public void deleteAllDocument(String indexName) {
+        try {
+            DeleteByQueryRequest deleteByQueryRequest = DeleteByQueryRequest.of(s -> s.index(indexName)
+                    .query(m -> m.matchAll(new MatchAllQuery.Builder().build()))
+            );
+            DeleteByQueryResponse deleteByQueryResponse = esClient.deleteByQuery(deleteByQueryRequest);
+            log.info("del doc result: {}", deleteByQueryResponse.total());
+        } catch (IOException e) {
+            log.error("del doc failed, error: ", e);
+        }
+    }
+
+    public void batchDeleteDocument(String indexName, List<String> ids) {
+        List<BulkOperation> bulkOperations = new ArrayList<>();
+        ids.forEach(a -> bulkOperations.add(BulkOperation.of(b -> b.delete(c -> c.id(a)))));
+        try {
+            BulkResponse bulkResponse = esClient.bulk(a -> a.index(indexName).operations(bulkOperations));
+            bulkResponse.items().forEach(a -> log.info("batch del result: {}", a.result()));
+            log.error("batch del bulk resp errors: {}", bulkResponse.errors());
+        } catch (IOException e) {
+            log.error("batch del doc failed, error: ", e);
+        }
+    }
+
+    public void delete(ElasticsearchAsyncClient client, String index) throws IOException {
+        int id = 1;
+        client.delete(i -> i.index(index).id("" + id)).whenComplete((success, failure)->{
+            System.out.println(success.index());
+            System.out.println(success.version());
+        });
+    }
+
+    public void addVideoText(String indexName, VideoText videoText) throws IOException {
+        videoText.setId(""+idGenerator.nextId());
+        IndexResponse indexResponse = esClient.index(i -> i.index(indexName).id(videoText.getId()).document(videoText));
+        String jsonResult = indexResponse.result().jsonValue();
+    }
+
+    public void updateVideoText(String index, VideoText videoText) throws IOException {
+        String id = videoText.getId();
+        IndexResponse response = esClient.index(i -> i.index(index).document(videoText).id(id));
+        Result result = response.result();
+        System.out.println(result.jsonValue());
+    }
+
+    public void deleteVideoText(String index, VideoText videoText) throws IOException {
+        String id = videoText.getId();
+        DeleteResponse deleteResponse = esClient.delete(s -> s.index(index).id(id));
+    }
+}

+ 77 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/es/ElasticService.java

@@ -0,0 +1,77 @@
+package cn.reghao.tnb.search.app.es;
+
+import cn.reghao.tnb.search.app.config.ElasticProperties;
+import co.elastic.clients.elasticsearch.ElasticsearchAsyncClient;
+import co.elastic.clients.elasticsearch.ElasticsearchClient;
+import co.elastic.clients.json.jackson.JacksonJsonpMapper;
+import co.elastic.clients.transport.ElasticsearchTransport;
+import co.elastic.clients.transport.rest_client.RestClientTransport;
+import org.apache.http.HttpHost;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.UsernamePasswordCredentials;
+import org.apache.http.client.CredentialsProvider;
+import org.apache.http.impl.client.BasicCredentialsProvider;
+import org.elasticsearch.client.RestClient;
+import org.elasticsearch.client.RestClientBuilder;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author reghao
+ * @date 2025-03-12 10:41:52
+ */
+@Service
+public class ElasticService {
+    private final ElasticsearchClient esClient;
+
+    public ElasticService(ElasticProperties elasticProperties) {
+        String host = elasticProperties.getHost();
+        int port = elasticProperties.getPort();
+        String username = elasticProperties.getUsername();
+        String password = elasticProperties.getPassword();
+        this.esClient = getElasticsearchClient(host, port, username, password);
+    }
+
+    private ElasticsearchClient getElasticsearchClient(String host, int port, String username, String password) {
+        CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
+        credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(username, password));
+
+        // 创建 SSL 上下文
+        /*SSLContextBuilder sslBuilder = SSLContexts.custom().loadTrustMaterial(null, (chain, authType) -> true);
+        final SSLContext sslContext = sslBuilder.build();*/
+
+        HttpHost httpHost = new HttpHost(host, port);
+        //HttpHost httpHost = new HttpHost("192.168.0.212", 9200, "https");
+        // 创建 low-level client
+        RestClientBuilder builder = RestClient.builder(httpHost)
+                .setHttpClientConfigCallback(httpClientBuilder ->
+                        httpClientBuilder
+                                .setDefaultCredentialsProvider(credentialsProvider))
+                                /*.setSSLContext(sslContext)
+                                .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE))*/;
+        RestClient restClient = builder.build();
+
+        // 创建一个 Transport 通信和一个 JacksonJsonpMapper 序列化实例
+        ElasticsearchTransport transport = new RestClientTransport(restClient, new JacksonJsonpMapper());
+        // 得到一个 es 客户端
+        ElasticsearchClient client = new ElasticsearchClient(transport);
+        return client;
+    }
+
+    private ElasticsearchAsyncClient getElasticsearchAsyncClient(String host, int port) {
+        //创建 low-level client
+        RestClient restClient = RestClient.builder(new HttpHost(host, port)).build();
+        //创建一个Transport通信 和 一个JacksonJsonpMapper序列化实例
+        ElasticsearchTransport transport = new RestClientTransport(restClient, new JacksonJsonpMapper());
+
+        //得到一个es客户端
+        ElasticsearchAsyncClient client = new ElasticsearchAsyncClient(transport);
+        return client;
+    }
+
+    public ElasticsearchClient getElasticsearchClient() {
+        return this.esClient;
+    }
+
+    public void close() {
+    }
+}

+ 129 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/es/IndexService.java

@@ -0,0 +1,129 @@
+package cn.reghao.tnb.search.app.es;
+
+import co.elastic.clients.elasticsearch.ElasticsearchClient;
+import co.elastic.clients.elasticsearch._types.analysis.Analyzer;
+import co.elastic.clients.elasticsearch._types.analysis.StandardAnalyzer;
+import co.elastic.clients.elasticsearch._types.mapping.IntegerNumberProperty;
+import co.elastic.clients.elasticsearch._types.mapping.Property;
+import co.elastic.clients.elasticsearch._types.mapping.TypeMapping;
+import co.elastic.clients.elasticsearch.indices.*;
+import co.elastic.clients.elasticsearch.indices.get_mapping.IndexMappingRecord;
+import co.elastic.clients.transport.endpoints.BooleanResponse;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Service;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @author reghao
+ * @date 2025-03-12 10:45:52
+ */
+@Slf4j
+@Service
+public class IndexService {
+    private final ElasticsearchClient esClient;
+
+    public IndexService(ElasticService elasticService) {
+        this.esClient = elasticService.getElasticsearchClient();
+    }
+
+    public void createIndex(String index, Map<String, Property> propertyMap) throws IOException {
+        String type = "_doc";
+        ExistsRequest existsRequest = new ExistsRequest.Builder()
+                .index(index)
+                .local(false)
+                .build();
+        BooleanResponse exists = esClient.indices().exists(existsRequest);
+        if(exists.value()) {
+            System.out.println("索引 " + index + " 已存在!");
+            return;
+        }
+
+        // 设置分词
+        IndexSettingsAnalysis indexSettingsAnalysis = new IndexSettingsAnalysis.Builder()
+                .analyzer("StandardAnalyzer", new Analyzer.Builder().standard(new StandardAnalyzer.Builder().build()).build())
+                .build();
+
+        IndexSettings indexSettings = new IndexSettings.Builder()
+                //.analysis(indexSettingsAnalysis)
+                .numberOfShards("10")
+                .numberOfReplicas("10")
+                //.refreshInterval(Time.of(t -> 5))
+                .build();
+        TypeMapping typeMapping = new TypeMapping.Builder()
+                .properties(propertyMap)
+                .build();
+        CreateIndexRequest createIndexRequest = new CreateIndexRequest.Builder()
+                .index(index)
+                .settings(indexSettings)
+                .mappings(typeMapping)
+                //.aliases("ngxlog", new Alias.Builder().isWriteIndex(true).build())
+                .build();
+        CreateIndexResponse createIndexResponse = esClient.indices().create(createIndexRequest);
+        if (createIndexResponse.acknowledged()) {
+            System.out.println("索引 " + index + " 创建成功!");
+        }
+    }
+
+    public void updateMapping(String index) throws IOException {
+        Map<String, Property> propertyMap = new HashMap<>();
+        Property intProp = Property.of(builder -> builder.integer(IntegerNumberProperty.of(pro -> pro.index(true))));
+        propertyMap.put("member", intProp);
+
+        /*Property keywordProp = Property.of(builder -> builder.keyword(KeywordProperty.of(pro -> pro.index(true))));
+        propertyMap.put("requestMethod", keywordProp);*/
+
+        /*Property textProp = Property.of(builder -> builder.text(TextProperty.of(pro -> pro.index(true).analyzer("ik_max_word"))));
+        propertyMap.put("requestUri", textProp);*/
+
+        PutMappingRequest putMappingRequest = PutMappingRequest.of(m -> m.index(index).properties(propertyMap));
+        PutMappingResponse putMappingResponse = esClient.indices().putMapping(putMappingRequest);
+        boolean acknowledged = putMappingResponse.acknowledged();
+        log.info("update mappings ack: {}", acknowledged);
+    }
+
+    public void deleteIndex(String index) throws IOException {
+        ExistsRequest existsRequest = new ExistsRequest.Builder()
+                .index(index)
+                .local(false)
+                .build();
+        BooleanResponse exists = esClient.indices().exists(existsRequest);
+        if(!exists.value()) {
+            System.out.println("索引 " + index + " 不存在!");
+            return;
+        }
+
+        DeleteIndexResponse response = esClient.indices().delete(i -> i.index(index));
+        System.out.println(response.acknowledged());
+    }
+
+    public void getMapping(String index) throws IOException {
+        GetMappingRequest request = GetMappingRequest.of(builder -> builder.index(index));
+        GetMappingResponse response = esClient.indices().getMapping(request);
+        Map<String, IndexMappingRecord> result = response.result();
+        log.info("{} mapping message: {}", index, result);
+    }
+
+    public void getIndex(String index) throws IOException {
+        List<String> indexName = new ArrayList<>();
+        indexName.add(index);
+        GetIndexRequest request = GetIndexRequest.of(builder -> builder.index(indexName));
+        GetIndexResponse getIndexResponse = esClient.indices().get(request);
+        Map<String, IndexState> result = getIndexResponse.result();
+        result.entrySet().forEach(entry -> {
+            log.info("key: {}, value: {}", entry.getKey(), entry.getValue());
+        });
+    }
+
+    public void existIndex() throws IOException {
+        List<String> indexName = new ArrayList<>();
+        indexName.add("user");
+        ExistsRequest request = ExistsRequest.of(builder -> builder.index(indexName));
+        BooleanResponse response = esClient.indices().exists(request);
+        log.info("exist: {}", response.value());
+    }
+}

+ 81 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/es/MappingService.java

@@ -0,0 +1,81 @@
+package cn.reghao.tnb.search.app.es;
+
+import co.elastic.clients.elasticsearch._types.mapping.*;
+import org.springframework.stereotype.Service;
+
+import java.lang.reflect.Field;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * @author reghao
+ * @date 2025-03-12 11:11:07
+ */
+@Service
+public class MappingService {
+    Property booleanProp = Property.of(builder -> builder.boolean_(BooleanProperty.of(pro -> pro.index(true))));
+    Property intProp = Property.of(builder -> builder.integer(IntegerNumberProperty.of(pro -> pro.index(true))));
+    Property longProp = Property.of(builder -> builder.long_(LongNumberProperty.of(pro -> pro.index(true).store(true))));
+    Property doubleProp = Property.of(builder -> builder.double_(DoubleNumberProperty.of(pro -> pro.index(true).store(true))));
+    Property dateProp = Property.of(builder -> builder.date(DateProperty.of(pro -> pro.index(true))));
+    Property keywordProp = Property.of(builder -> builder.keyword(KeywordProperty.of(pro -> pro.index(true))));
+    Property textProp = Property.of(builder -> builder.text(TextProperty.of(pro -> pro.index(true))));
+    Property textPropIk = Property.of(builder -> builder.text(TextProperty.of(pro -> pro.index(true).analyzer("ik_max_word"))));
+    Property textKeywordProp = new Property(new TextProperty.Builder().index(true).fields("raw", keywordProp).store(true).build());
+
+    public Map<String, Property> getPropertyMap(Class<?> clazz) {
+        Field[] fields = clazz.getDeclaredFields();
+        for (Field field : fields) {
+            String name = field.getName();
+            Class<?> type = field.getType();
+        }
+
+        Map<String, Property> propertyMap = new HashMap<>();
+        propertyMap.put("id", keywordProp);
+        propertyMap.put("timeIso8601", dateProp);
+        propertyMap.put("remoteAddr", keywordProp);
+        propertyMap.put("request", textKeywordProp);
+        propertyMap.put("status", intProp);
+        propertyMap.put("requestMethod", keywordProp);
+        propertyMap.put("bodyBytesSent", intProp);
+        propertyMap.put("requestTime", doubleProp);
+        propertyMap.put("upstreamResponseTime", keywordProp);
+        propertyMap.put("upstreamAddr", keywordProp);
+        propertyMap.put("host", keywordProp);
+        propertyMap.put("url", textKeywordProp);
+        propertyMap.put("httpXForwardedFor", textKeywordProp);
+        propertyMap.put("httpReferer", textKeywordProp);
+        propertyMap.put("httpUserAgent", textKeywordProp);
+        return propertyMap;
+    }
+
+    public Map<String, Property> getWenshuPropertyMap() {
+        Map<String, Property> propertyMap = new HashMap<>();
+        propertyMap.put("id", keywordProp);
+        propertyMap.put("originalUrl", keywordProp);
+        propertyMap.put("caseId", textPropIk);
+        propertyMap.put("caseName", textPropIk);
+        propertyMap.put("court", keywordProp);
+        propertyMap.put("region", keywordProp);
+        propertyMap.put("caseType", keywordProp);
+        propertyMap.put("caseTypeId", intProp);
+        propertyMap.put("judgmentDate", keywordProp);
+        propertyMap.put("judgeDate", keywordProp);
+        propertyMap.put("publicDate", keywordProp);
+        propertyMap.put("parties", textPropIk);
+        propertyMap.put("cause", textPropIk);
+        propertyMap.put("legalBasis", textPropIk);
+        propertyMap.put("fullText", textPropIk);
+        return propertyMap;
+    }
+
+    public Map<String, Property> getVideoTextPropertyMap() {
+        Map<String, Property> propertyMap = new HashMap<>();
+        propertyMap.put("id", keywordProp);
+        propertyMap.put("video", keywordProp);
+        propertyMap.put("title", textPropIk);
+        propertyMap.put("description", textPropIk);
+        propertyMap.put("vip", booleanProp);
+        return propertyMap;
+    }
+}

+ 207 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/es/QueryService.java

@@ -0,0 +1,207 @@
+package cn.reghao.tnb.search.app.es;
+
+import co.elastic.clients.elasticsearch.ElasticsearchClient;
+import co.elastic.clients.elasticsearch._types.FieldValue;
+import co.elastic.clients.elasticsearch._types.query_dsl.BoolQuery;
+import co.elastic.clients.elasticsearch._types.query_dsl.Query;
+import co.elastic.clients.elasticsearch._types.query_dsl.QueryStringQuery;
+import co.elastic.clients.elasticsearch._types.query_dsl.TermQuery;
+import co.elastic.clients.elasticsearch.core.SearchRequest;
+import co.elastic.clients.elasticsearch.core.SearchResponse;
+import co.elastic.clients.elasticsearch.core.search.Highlight;
+import co.elastic.clients.elasticsearch.core.search.HighlightField;
+import co.elastic.clients.elasticsearch.core.search.Hit;
+import co.elastic.clients.elasticsearch.core.search.HitsMetadata;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.PageImpl;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.stereotype.Service;
+
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * @author reghao
+ * @date 2025-03-12 11:16:48
+ */
+@Slf4j
+@Service
+public class QueryService<T> {
+    private final ElasticsearchClient esClient;
+
+    public QueryService(ElasticService elasticService) {
+        this.esClient = elasticService.getElasticsearchClient();
+    }
+
+    public Page<T> queryWithHighlight(String index, String queryString, int pn, int ps, Class<T> clazz) {
+        String highlightFieldName1 = "title";
+        String highlightFieldName = "caseName";
+        List<String> otherFiledNames = List.of("caseName", "cause", "parties");
+        List<String> otherFiledNames1 = List.of();
+
+        // 1.构建查询的对象
+        QueryStringQuery stringQuery = new QueryStringQuery.Builder()
+                // 查询的字段
+                .fields(highlightFieldName, otherFiledNames.toArray(new String[0]))
+                .query(queryString)
+                .build();
+        Query query = new Query.Builder()
+                .queryString(stringQuery)
+                .build();
+
+        Query query1 = new Query.Builder()
+                .bool(BoolQuery.of(b -> b
+                        .must(m -> m.term(t -> t.field("vip").value(FieldValue.of(""))))
+                        .must(m -> m.queryString(stringQuery))))
+                .build();
+
+        // 2.高亮显示
+        HighlightField highlightField = new HighlightField.Builder()
+                .matchedFields(highlightFieldName)
+                .preTags("<span style=\"color:red\">")
+                .postTags("</span>")
+                .build();
+        Highlight highlight = new Highlight.Builder()
+                .fields(highlightFieldName, highlightField)
+                .requireFieldMatch(false)
+                .build();
+
+        // 3.搜索请求
+        int start = (pn-1)*ps;
+        SearchRequest searchRequest = new SearchRequest.Builder()
+                .index(index)
+                .from(start)
+                .size(ps)
+                .query(query)
+                .highlight(highlight)
+                .build();
+        try {
+            SearchResponse<T> searchResponse = esClient.search(searchRequest, clazz);
+            HitsMetadata<T> hitsMetadata = searchResponse.hits();
+            long total = hitsMetadata.total().value();
+            List<T> list = hitsMetadata.hits().stream().map(mapper -> {
+                Map<String, List<String>> highlightMap = mapper.highlight();
+                String highlightStr = "";
+                if (!highlightMap.isEmpty()) {
+                    highlightStr = mapper.highlight()
+                            .get(highlightFieldName)
+                            .get(0);
+                }
+
+                T t = mapper.source();
+                try {
+                    Field field = clazz.getDeclaredField(highlightFieldName);
+                    field.setAccessible(true);
+
+                    if (t == null) {
+                        Object object = clazz.getDeclaredConstructors()[0].newInstance();
+                        field.set(object, highlightStr);
+                    } else {
+                        field.set(t, highlightStr);
+                    }
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+                return mapper.source();
+            }).collect(Collectors.toList());
+
+            //return list;
+            PageRequest pageRequest = PageRequest.of(pn-1, ps);
+            return new PageImpl<>(list, pageRequest, total);
+        } catch (IOException e) {
+            log.error("search By Query Highlight error", e);
+        }
+
+        return Page.empty();
+    }
+
+    public Page<T> queryWithHighlight(String index, Query query, int pn, int ps, Class<T> clazz) {
+        String highlightFieldName = "title";
+        // 1.构建查询的对象
+        // 2.高亮显示
+        HighlightField highlightField = new HighlightField.Builder()
+                .matchedFields(highlightFieldName)
+                .preTags("<span style=\"color:red\">")
+                .postTags("</span>")
+                .build();
+        Highlight highlight = new Highlight.Builder()
+                .fields(highlightFieldName, highlightField)
+                .requireFieldMatch(false)
+                .build();
+
+        // 3.搜索请求
+        int start = (pn-1)*ps;
+        SearchRequest searchRequest = new SearchRequest.Builder()
+                .index(index)
+                .from(start)
+                .size(ps)
+                .query(query)
+                .highlight(highlight)
+                .build();
+        try {
+            SearchResponse<T> searchResponse = esClient.search(searchRequest, clazz);
+            HitsMetadata<T> hitsMetadata = searchResponse.hits();
+            long total = hitsMetadata.total().value();
+            List<T> list = hitsMetadata.hits().stream().map(mapper -> {
+                Map<String, List<String>> highlightMap = mapper.highlight();
+                String highlightStr = "";
+                if (!highlightMap.isEmpty()) {
+                    highlightStr = mapper.highlight()
+                            .get(highlightFieldName)
+                            .get(0);
+                }
+
+                T t = mapper.source();
+                try {
+                    Field field = clazz.getDeclaredField(highlightFieldName);
+                    field.setAccessible(true);
+
+                    if (t == null) {
+                        Object object = clazz.getDeclaredConstructors()[0].newInstance();
+                        field.set(object, highlightStr);
+                    } else {
+                        field.set(t, highlightStr);
+                    }
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+                return mapper.source();
+            }).collect(Collectors.toList());
+
+            //return list;
+            PageRequest pageRequest = PageRequest.of(pn-1, ps);
+            return new PageImpl<>(list, pageRequest, total);
+        } catch (IOException e) {
+            log.error("search By Query Highlight error", e);
+        }
+
+        return Page.empty();
+    }
+
+    public T queryById(Class<T> clazz, String index, String id) {
+        Query query = TermQuery.of(t -> t
+                .field("id").value(FieldValue.of(id))
+        )._toQuery();
+
+        SearchRequest searchRequest = SearchRequest.of(s -> s
+                .index(index)
+                .query(query)
+        );
+
+        try {
+            SearchResponse<T> searchResponse = esClient.search(searchRequest, clazz);
+            List<Hit<T>> hits = searchResponse.hits().hits();
+            if (!hits.isEmpty()) {
+                return hits.get(0).source();
+            }
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+
+        return null;
+    }
+}

+ 434 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/es/SearchService.java

@@ -0,0 +1,434 @@
+package cn.reghao.tnb.search.app.es;
+
+import cn.reghao.tnb.search.app.model.po.NginxLog;
+import cn.reghao.tnb.search.app.model.po.VideoText;
+import co.elastic.clients.elasticsearch.ElasticsearchClient;
+import co.elastic.clients.elasticsearch._types.FieldValue;
+import co.elastic.clients.elasticsearch._types.ShardStatistics;
+import co.elastic.clients.elasticsearch._types.SortOrder;
+import co.elastic.clients.elasticsearch._types.aggregations.*;
+import co.elastic.clients.elasticsearch._types.query_dsl.*;
+import co.elastic.clients.elasticsearch.core.CountRequest;
+import co.elastic.clients.elasticsearch.core.CountResponse;
+import co.elastic.clients.elasticsearch.core.SearchRequest;
+import co.elastic.clients.elasticsearch.core.SearchResponse;
+import co.elastic.clients.elasticsearch.core.search.Hit;
+import co.elastic.clients.elasticsearch.core.search.HitsMetadata;
+import co.elastic.clients.elasticsearch.core.search.TotalHits;
+import co.elastic.clients.elasticsearch.core.search.TotalHitsRelation;
+import co.elastic.clients.json.JsonData;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Service;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
/**
 * Exploration/demo service around the Elasticsearch Java client: single-hit and
 * paged searches, bool/term/match/range/wildcard/regexp/exists query builders,
 * count requests and terms aggregations over the nginx-log and video-text indices.
 *
 * <p>NOTE(review): several methods build queries they never execute and keep unused
 * locals — they read as API experiments rather than production code.
 *
 * @author reghao
 * @date 2025-03-12 10:51:21
 */
@Slf4j
@Service
public class SearchService {
    private final ElasticsearchClient esClient;

    public SearchService(ElasticService elasticService) throws Exception {
        this.esClient = elasticService.getElasticsearchClient();
    }

    /**
     * Full-text match on the {@code name} field; logs the hit total and every hit.
     *
     * <p>NOTE(review): when {@code total} is null the else-branch still calls
     * {@code total.value()} and would throw a NullPointerException — confirm the
     * client always returns a total here.
     */
    public void searchOne(String indexName, String searchText) throws IOException {
        SearchResponse<NginxLog> searchResponse = esClient.search(s -> s
                .index(indexName)
                // query part of the search request (a request may also carry other components, e.g. aggregations)
                .query(q -> q
                        // pick one of the many query variants; here a match query (full-text search)
                        .match(t -> t
                                .field("name")
                                .query(searchText))), NginxLog.class);
        TotalHits total = searchResponse.hits().total();
        boolean isExactResult = total != null && total.relation() == TotalHitsRelation.Eq;
        if (isExactResult) {
            log.info("search has: {} results", total.value());
        } else {
            log.info("search more than : {} results", total.value());
        }
        List<Hit<NginxLog>> hits = searchResponse.hits().hits();
        for (Hit<NginxLog> hit : hits) {
            NginxLog source = hit.source();
            log.info("Found result: {}", source);
        }
    }

    /**
     * Page through an index, 100 hits per page, sorted by {@code timeIso8601} descending.
     *
     * @param pn 1-based page number
     * @return the {@code _source} payloads of the hits on the requested page
     *
     * <p>NOTE(review): {@code searchField}/{@code searchText} are unused, and only
     * {@code query1} (the AND query) is executed — {@code query}, {@code query11}
     * and {@code query12} are experiment leftovers.
     */
    public List<NginxLog> searchByPage(String indexName, int pn, String searchField, String searchText) throws IOException {
        int ps = 100;
        String sortField = "timeIso8601";
        Query query = RangeQuery.of(r -> r.field("age").gte(JsonData.of(8)))._toQuery();
        Query query11 = getOrQuery();
        Query query1 = getAndQuery();
        Query query12 = getTermQuery();

        int start = (pn-1)*ps;
        SearchRequest searchRequest = SearchRequest.of(s -> s
                .index(indexName)
                .query(query1)
                .from(start)
                .size(ps)
                // sort by the time field in descending order
                .sort(f -> f.field(o -> o.field(sortField).order(SortOrder.Desc)))
        );

        SearchResponse<NginxLog> searchResponse = esClient.search(searchRequest, NginxLog.class);
        /*List<Hit<NginxLog>> hits = searchResponse.hits().hits();
        for (Hit<NginxLog> hit : hits) {
            NginxLog product = hit.source();
            log.info("search page result: {}", product);
        }*/
        return searchResponse.hits().hits().stream().map(Hit::source).collect(Collectors.toList());
    }

    /**
     * Grab-bag of bool-query experiments over the video-text index.
     *
     * <p>NOTE(review): {@code stringQuery}, {@code query}, {@code searchRequest} and
     * {@code searchRequest1} are built but never executed; only the match-all
     * {@code searchRequest2} is sent. The hard-coded keyword and the trailing
     * empty println are debug leftovers.
     */
    public void searchAll(String indexName) throws IOException {
        String fieldName1 = "url.raw";
        String fieldValue1 = "/";

        String fieldName2 = "requestMethod.raw";
        String fieldValue2 = "POST";

        String fieldName3 = "status";
        String fieldValue3 = "200";

        String fieldName4 = "vip";
        Boolean fieldValue4 = false;

        String keyword = "";
        QueryStringQuery stringQuery = new QueryStringQuery.Builder()
                // fields to query
                .fields("title")
                .query(keyword)
                .build();
        Query query = new Query.Builder()
                .bool(BoolQuery.of(b -> b
                        .must(m -> m.term(t -> t.field("vip").value(FieldValue.of(true))))
                        .must(m -> m.queryString(stringQuery))))
                .build();

        SearchRequest searchRequest = SearchRequest.of(s -> s
                .index(indexName)
                .query(q -> q.bool(b -> b
                                .must(m -> m.term(t -> t.field(fieldName4).value(FieldValue.of(fieldValue4))))
                        //.must(m -> m.term(t -> t.field("name").value(FieldValue.of("test"))))
                ))
        );

        String keyword1 = "大奶";
        SearchRequest searchRequest1 = SearchRequest.of(s -> s
                .index(indexName)
                .query(q -> q.bool(b -> b
                        .must(m -> m.term(t -> t.field(fieldName4).value(FieldValue.of(fieldValue4))))
                        .must(m -> m.queryString(t -> t.fields("title").query(keyword1)))
                ))
        );

        SearchRequest searchRequest2 = SearchRequest.of(s -> s
                .index(indexName)
        );

        //SearchResponse<NginxLog> searchResponse = esClient.search(searchRequest, NginxLog.class);
        //List<NginxLog> list = searchResponse.hits().hits().stream().map(Hit::source).collect(Collectors.toList());
        SearchResponse<VideoText> searchResponse = esClient.search(searchRequest2, VideoText.class);
        List<VideoText> list = searchResponse.hits().hits().stream().map(Hit::source).collect(Collectors.toList());
        System.out.println();
    }

    /**
     * Match query on a single field; the response is only logged, not returned.
     */
    public void searchByField(String indexName, String fieldName, String fieldValue) throws IOException {
        SearchRequest request = SearchRequest.of(s -> s
                .index(indexName)
                .query(query -> query.match(match -> match.field(fieldName).query(fieldValue)))
        );

        SearchResponse<NginxLog> searchResponse = esClient.search(request, NginxLog.class);
        log.info("search result: {}", searchResponse);
    }

    /**
     * Demonstrates unpacking a search response: hits, total, max score, shards,
     * timeout flag. Results are printed, not returned.
     */
    public void search(String index) throws IOException {
        String fieldName = "url";
        String fieldValue = "/api";
        // build the query
        SearchRequest searchRequest = SearchRequest.of(s -> s.index(index)
                /*.sort(s1 -> s1.field(f -> f.field(fieldName).order(SortOrder.Asc)))
                .scroll(s2 -> s2.offset(0))*/
                .from(0)
                .size(500)
                .query(q -> q.match(m -> m.field(fieldName).query(FieldValue.of(fieldValue))))
        );

        SearchResponse<NginxLog> searchResponse = esClient.search(searchRequest, NginxLog.class);
        HitsMetadata<NginxLog> hitsMetadata = searchResponse.hits();
        // total hit count
        TotalHits totalHits = hitsMetadata.total();
        Double maxScore = hitsMetadata.maxScore();
        // the matching hits
        List<Hit<NginxLog>> hits = hitsMetadata.hits();
        // the _source payloads
        List<NginxLog> nginxLogs = hits.stream().map(Hit::source).collect(Collectors.toList());

        // max score
        System.out.println(searchResponse.maxScore());
        // shard statistics
        System.out.println(searchResponse.shards());
        // whether the request timed out
        System.out.println(searchResponse.timedOut());
    }

    /**
     * Runs an exists query against the hard-coded {@code nginx_log} index;
     * results are assigned to locals and discarded (exploratory).
     */
    public void search() throws IOException {
        Query query = getExistsQuery();

        String index = "nginx_log";
        SearchRequest searchRequest = new SearchRequest.Builder()
                .index(index)
                .query(query)
                .build();
        SearchResponse<NginxLog> searchResponse = esClient.search(searchRequest, NginxLog.class);
        TotalHits totalHits = searchResponse.hits().total();
        List<NginxLog> list = searchResponse.hits().hits().stream().map(Hit::source).collect(Collectors.toList());
    }

    /**
     * Count documents in an index.
     *
     * <p>NOTE(review): the three candidate queries are unused and the request's
     * query line is commented out, so this counts every document in the index.
     */
    public void count(String index) throws IOException {
        Query query1 = getMatchQuery();
        // remoteAddr field is absent
        Query query2 = BoolQuery.of(b ->
                b.mustNot(m -> m.exists(t -> t.field("remoteAddr"))
                ))._toQuery();
        // remoteAddr field is present
        Query query3 = BoolQuery.of(b ->
                b.must(m -> m.exists(t -> t.field("remoteAddr"))
                ))._toQuery();

        CountRequest countRequest = CountRequest.of(s -> s
                        .index(index)
                //.query(query3)
        );

        CountResponse countResponse = esClient.count(countRequest);
        long total = countResponse.count();
        ShardStatistics shardStatistics = countResponse.shards();
        System.out.println("total -> " + total);
    }

    /**
     * Exact-value (term) query: matches the stored value verbatim, without analysis.
     *
     * <p>NOTE(review): {@code fieldValue1} ends with an escaped double quote —
     * confirm that is intentional; the fieldName2/fieldValue2 pair is unused.
     *
     * @date 2025-03-12 13:45:17
     */
    public Query getTermQuery() {
        String fieldName1 = "url";
        String fieldValue1 = "/datareceive/ReceiveData/SendContentResult\"";

        String fieldName2 = "requestMethod";
        String fieldValue2 = "POST";

        Query query = TermQuery.of(t -> t
                .field(fieldName1).value(FieldValue.of(fieldValue1))
        )._toQuery();
        return query;
    }

    /**
     * MatchQuery analyses (tokenises) the query string before searching;
     * TermQuery searches the input verbatim, without analysing it.
     *
     * @date 2025-03-12 15:04:22
     */
    public Query getMatchQuery() {
        String fieldName = "host";
        String searchText = "api.iquizoo.com";

        Query query = MatchQuery.of(m -> m.field(fieldName).query(searchText))._toQuery();
        return query;
    }

    /**
     * Multi-value query, the equivalent of SQL {@code IN}.
     *
     * @date 2025-03-12 14:12:38
     */
    public Query getTermsQuery() {
        String fieldName = "status";
        List<FieldValue> fieldValueList = List.of(FieldValue.of("401"), FieldValue.of("500"));

        Query query = TermsQuery.of(t ->
                t.field(fieldName).terms(TermsQueryField.of(q -> q.value(fieldValueList)))
        )._toQuery();
        return query;
    }

    /**
     * OR query, the equivalent of the SQL:
     * SELECT * FROM test1 where (uid = 1 or uid =2) and phone = 12345678919
     *
     * @date 2025-03-12 13:46:26
     */
    public Query getOrQuery() {
        Query query = BoolQuery.of(b ->
                b.should(m -> m.term(t -> t.field("status").value(FieldValue.of(401))))
                        .should(m -> m.term(t -> t.field("status").value(FieldValue.of(403))))
                        .must(m -> m.term(t -> t.field("host").value(FieldValue.of("api.iquizoo.com"))))
        )._toQuery();
        return query;
    }

    /**
     * AND query: both term conditions must match (bool/must).
     */
    public Query getAndQuery() {
        String fieldName1 = "url.raw";
        String fieldValue1 = "/datareceive/ReceiveData/SendContentResult";

        String fieldName2 = "requestMethod";
        String fieldValue2 = "POST";

        Query query = BoolQuery.of(b -> b
                .must(m -> m.term(t -> t.field(fieldName1).value(FieldValue.of(fieldValue1))))
                .must(m -> m.term(t -> t.field(fieldName2).value(FieldValue.of(fieldValue2))))
        )._toQuery();
        return query;
    }

    /**
     * Fuzzy match, the equivalent of SQL {@code LIKE} (wildcard query).
     *
     * @date 2025-03-12 14:11:45
     */
    public Query getWildcardQuery() {
        Query query = BoolQuery.of(b ->
                b.must(m -> m.wildcard(t -> t.field("url").value("*result*")))
        )._toQuery();
        return query;
    }

    /**
     * Existence query, the equivalent of SQL {@code EXISTS}/{@code IS NOT NULL}.
     *
     * @date 2025-03-12 14:20:26
     */
    public Query getExistsQuery() {
        // whether the field exists on the document
        Query query = ExistsQuery.of(t -> t.field("host"))._toQuery();
        return query;
    }

    /**
     * Range query, the equivalent of SQL {@code >} and {@code <}:
     * gt is greater-than, lt is less-than, gte/lte are the inclusive forms.
     *
     * @date 2025-03-12 14:21:26
     */
    public Query getRangeQuery() {
        int status1 = 404;
        int status2 = 600;
        Query query = RangeQuery.of(t -> t.field("status").gte(JsonData.of(status1)).lte(JsonData.of(status2)))._toQuery();
        return query;
    }

    /**
     * Regular-expression query.
     *
     * @date 2025-03-12 14:22:18
     */
    public Query getRegexpQuery() {
        Query query = RegexpQuery.of(t -> t.field("host").value("api.*"))._toQuery();
        return query;
    }

    /**
     * Sampler of further query variants.
     *
     * <p>NOTE(review): the MatchAllQuery and FuzzyQuery results are built and
     * discarded; only the prefix query is returned.
     */
    public Query getQuery() {
        MatchAllQuery.of(m -> m.queryName("host"))._toQuery();
        // A fuzzy query tolerates a single-edit mistake in a word, e.g.:
        //
        // substitute one letter: box  -> fox
        // remove one letter:     black -> lack
        // insert one letter:     sic  -> sick
        // transpose two letters: act  -> cat
        FuzzyQuery.of(f -> f.field("host").value("lonel"));
        // query by prefix of the given field
        Query query = PrefixQuery.of(p -> p.field("host").value("api"))._toQuery();
        return query;
    }

    /**
     * Aggregation query, the equivalent of SQL {@code GROUP BY}: a terms
     * aggregation on {@code timeIso8601}, bucket keys and doc counts printed
     * to stdout; string- and long-keyed terms aggregates are both handled.
     *
     * @date 2025-03-12 15:06:48
     */
    public void aggregate(String index) throws Exception {
        String aggField1 = "status";
        String aggField2 = "remoteAddr";
        String aggField3 = "httpUserAgent.raw";
        String aggField4 = "url.raw";
        String aggField5 = "timeIso8601";

        String fieldName1 = "url.raw";
        String fieldValue1 = "/base/Device/PageList";

        SearchRequest searchRequest = new SearchRequest.Builder()
                .index(index)
                .query(q -> q.bool(b -> b.must(m -> m.term(t -> t.field(fieldName1).value(FieldValue.of(fieldValue1))))))
                .size(10000)
                .aggregations("first_agg", a->a.terms(t->t.field(aggField5).size(65535)))
                //.aggregations("agg1", a->a.histogram(t->t.field("httpUserAgent").interval(50.0)))
                //.aggregations("agg1", a->a.sum(t->t.field("httpUserAgent")))
                //.aggregations("second_agg", a->a.avg(t->t.field("status")))
                .build();

        SearchResponse<NginxLog> searchResponse = esClient.search(searchRequest, NginxLog.class);
        TotalHits totalHits = searchResponse.hits().total();
        Map<String, Aggregate> resultMap = searchResponse.aggregations();

        List<Long> countList = new ArrayList<>();
        resultMap.forEach((k, v) -> {
            Object value = v._get();
            if (value instanceof StringTermsAggregate) {
                StringTermsAggregate terms = (StringTermsAggregate) value;
                List<StringTermsBucket> list = terms.buckets().array();
                list.forEach(bucket -> {
                    String groupKey = (String) bucket.key()._get();
                    long count = bucket.docCount();
                    countList.add(count);
                    //System.out.println(groupKey + " : " + IpTool.getLocation(groupKey) + " -> " + count);
                    System.out.println(groupKey + " : " + count);
                });
                System.out.println("bucket size = " + list.size());
            } else if (value instanceof LongTermsAggregate) {
                LongTermsAggregate terms = (LongTermsAggregate) value;
                List<LongTermsBucket> list = terms.buckets().array();
                list.forEach(bucket -> {
                    String groupKey = bucket.key();
                    String groupKeyStr = bucket.keyAsString();
                    long count = bucket.docCount();
                    countList.add(count);
                    System.out.println(groupKeyStr + " : " + count);
                });
            } else {
                System.out.println(value);
            }
        });

        System.out.println("total = " + countList.stream().mapToLong(Long::longValue).sum());
    }
}

+ 208 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/ip/IPLocation.java

@@ -0,0 +1,208 @@
+package cn.reghao.tnb.search.app.ip;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.nio.ByteOrder;
+import java.nio.MappedByteBuffer;
+import java.nio.channels.FileChannel;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.ReentrantLock;
+
/**
 * IP-location lookup over the QQWry (qqwry.dat) database, read through a
 * memory-mapped file.
 *
 * <p>File layout (little-endian): bytes 0-3 and 4-7 hold the offsets of the first
 * and last index entries; each index entry is 7 bytes — a 4-byte start IP plus a
 * 3-byte offset to the location record. Records may redirect (modes 0x01/0x02) to
 * shared country/area strings, which are NUL-terminated and GBK-encoded.
 *
 * <p>NOTE(review): advertised as thread-safe, but only {@link #fetchIPLocation}
 * and {@code load()} take the lock — the public {@code read*AsLong} methods
 * mutate the shared buffer position unlocked; confirm external callers go
 * through {@code fetchIPLocation} only.
 *
 * @author difeng
 * @date 2016-12-11
 */
public class IPLocation {

	/** Size of one index entry: 4-byte start IP + 3-byte record offset. */
	private static final int IP_RECORD_LENGTH = 7;

	/** Record redirect mode 1: both country and area live at another offset. */
	private static final byte REDIRECT_MODE_1 = 0x01;

	/** Record redirect mode 2: only the country string is redirected. */
	private static final byte REDIRECT_MODE_2 = 0x02;

	// Memory-mapped view of qqwry.dat; position() is mutated by every read.
	private MappedByteBuffer mbbFile;

	private static Long lastModifyTime = 0L;

	// When true, the file is polled for changes and reloaded (see watch()).
	public static boolean enableFileWatch = false;

	// Guards load() against concurrent fetchIPLocation() calls.
	private static ReentrantLock lock = new ReentrantLock();

	private File qqwryFile;

	// Offset of the first index entry (file bytes 0-3).
	private long firstIndexOffset;

	// Offset of the last index entry (file bytes 4-7).
	private long lastIndexOffset;

	// Number of index entries derived from the two offsets above.
	private long totalIndexCount;

	/**
	 * Map the database file into memory; optionally start the change watcher.
	 *
	 * @param filePath path to qqwry.dat
	 */
	public IPLocation(String filePath) throws Exception {
		this.qqwryFile = new File(filePath);
		load();
		if (enableFileWatch) {
			watch();
		}
	}

	// Poll the file's mtime every 30s (after a 1s delay) and reload on change.
	// NOTE(review): the scheduled pool is never shut down.
	private void watch(){
		Executors.newScheduledThreadPool(1).scheduleAtFixedRate(new Runnable() {
			@Override
			public void run() {
				long time = qqwryFile.lastModified();
				if (time > lastModifyTime) {
					lastModifyTime = time;
					try {
						load();
					} catch (Exception e) {
						e.printStackTrace();
					}
				}
			}
		}, 1000L, 30000L, TimeUnit.MILLISECONDS);
	}

	/** Read 4 bytes at pos as an unsigned little-endian int (order set in load()). */
	public long read4ByteAsLong(long pos) {
		mbbFile.position((int)pos);
		return 0xFFFFFFFFL & mbbFile.getInt();
	}

	/**
	 * Read a 3-byte little-endian unsigned value at pos.
	 * Implemented by reading a full int and masking the low 3 bytes, so it reads
	 * one byte past the field — relies on the file extending beyond pos+3.
	 */
	public long read3ByteAsLong(long pos){
		mbbFile.position((int)pos);
		return 0xFFFFFFL & mbbFile.getInt();
	}


	// (Re)map the file and read the index-range header under the lock.
	@SuppressWarnings("resource")
	private void load() throws Exception {
		lastModifyTime = qqwryFile.lastModified();
		lock.lock();
		try {
			mbbFile =  new RandomAccessFile(qqwryFile, "r")
					.getChannel()
					.map(FileChannel.MapMode.READ_ONLY, 0, qqwryFile.length());
			mbbFile.order(ByteOrder.LITTLE_ENDIAN);
			firstIndexOffset = read4ByteAsLong(0);
			lastIndexOffset = read4ByteAsLong(4);
			totalIndexCount = (lastIndexOffset - firstIndexOffset) / IP_RECORD_LENGTH + 1;
		} finally {
			lock.unlock();
		}
	}
	
	/**
	 * Convert a dotted-quad IPv4 string into an unsigned 32-bit value held in a
	 * long; e.g. ip 182.92.240.48 (hex B6.5C.F0.30) becomes 0xB65CF030.
	 *
	 * @param ipStr dotted-quad IPv4 address, must not be null
	 * @return the address as an unsigned 32-bit long
	 */
	private static long inet_pton(String ipStr) {
		if(ipStr == null){
			throw new NullPointerException("ip不能为空");
		}
		String [] arr = ipStr.split("\\.");
		long ip = (Long.parseLong(arr[0])  & 0xFFL) << 24 & 0xFF000000L;
		ip |=  (Long.parseLong(arr[1])  & 0xFFL) << 16 & 0xFF0000L;
		ip |=  (Long.parseLong(arr[2])  & 0xFFL) << 8 & 0xFF00L;
		ip |=  (Long.parseLong(arr[3])  & 0xFFL);
		return ip;
	}

	/**
	 * Binary-search the index for the entry whose [startIP, nextStartIP) range
	 * contains the given ip; returns the 3-byte record offset, or -1 if not found.
	 */
	private long search(long ip) {
		long low = 0;
		long high = totalIndexCount;
		long mid = 0;
		while(low <= high) {
			mid = (low + high) >>> 1 ;
		    long indexIP = read4ByteAsLong(firstIndexOffset + (mid - 1) * IP_RECORD_LENGTH);
		    long nextIndexIP =  read4ByteAsLong(firstIndexOffset + mid * IP_RECORD_LENGTH);
		    if(indexIP <= ip && ip < nextIndexIP) {
		    	return read3ByteAsLong(firstIndexOffset + (mid - 1) * IP_RECORD_LENGTH + 4);
		    } else {
		    	if(ip > indexIP) {
		    		low = mid + 1;
		    	} else if(ip < indexIP) {
		    		high = mid - 1;
		    	}
		    }
		}
		return -1;
	}

	/**
	 * Decode the location record at offset, following the QQWry redirect modes:
	 * 0x01 redirects the whole record, 0x02 redirects only the country string;
	 * otherwise country and area are stored inline.
	 * Any decoding failure (including buffer overruns) is swallowed and null
	 * is returned.
	 */
	private Location readIPLocation(long offset) {
		try {
			mbbFile.position((int)offset + 4);
			Location loc = new Location();
			byte redirectMode = mbbFile.get();
			if (redirectMode == REDIRECT_MODE_1) {
				long countryOffset = read3ByteAsLong((int)offset + 5);
				mbbFile.position((int)countryOffset);
				redirectMode = mbbFile.get();
				if (redirectMode == REDIRECT_MODE_2) {
					loc.country = readString(read3ByteAsLong(countryOffset + 1));
					mbbFile.position((int)countryOffset + 4);
				} else {
					loc.country = readString(countryOffset);
				}
				loc.area = readArea(mbbFile.position());
			} else if (redirectMode == REDIRECT_MODE_2) {
				loc.country = readString(read3ByteAsLong((int)offset + 5));
				loc.area = readArea((int)offset + 8);
			} else {
				loc.country = readString(mbbFile.position() - 1);
				loc.area = readArea(mbbFile.position());
			}
			return loc;
		} catch (Exception e) {
			return null;
		}
	}

	// Decode the area string at offset, following a redirect when present;
	// a zero redirect target means "no area".
	private String readArea(int offset) {
		mbbFile.position(offset);
		byte redirectMode = mbbFile.get();
		if (redirectMode == REDIRECT_MODE_1 || redirectMode == REDIRECT_MODE_2) {
			long areaOffset = read3ByteAsLong((int)offset + 1);
			if (areaOffset == 0){
				return "";
			} else {
				return readString(areaOffset);
			}
		} else {
			return readString(offset);
		}
	}

	/**
	 * Read a NUL-terminated, GBK-encoded string at offset.
	 * NOTE(review): the 128-byte buffer overflows (ArrayIndexOutOfBoundsException)
	 * on longer strings — only IOException is caught here; the overflow is
	 * swallowed by readIPLocation's catch(Exception).
	 */
	private String readString(long offset) {
		try {
			mbbFile.position((int)offset);
			byte[] buf = new byte[128];
			int i;
			for (i = 0, buf[i] = mbbFile.get(); buf[i] != 0; buf[++i] = mbbFile.get());
			
			if (i != 0){
			    return new String(buf, 0, i, "GBK");
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
		return "";
	}

	/**
	 * Locate an IPv4 address; the only fully locked public entry point.
	 *
	 * @param ip dotted-quad IPv4 address
	 * @return the decoded location, or null when the IP is not found or the
	 *         record cannot be decoded
	 */
	public  Location fetchIPLocation(String ip) {
		lock.lock();
		try {
			long offset = search(inet_pton(ip));
			if(offset != -1){
				return readIPLocation(offset);
			}
		} finally {
			lock.unlock();
		}
		return null;
	}
}
+

+ 23 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/ip/IpTool.java

@@ -0,0 +1,23 @@
+package cn.reghao.tnb.search.app.ip;
+
+/**
+ * @author reghao
+ * @date 2025-03-12 17:16:31
+ */
+public class IpTool {
+    static String filePath = "/home/reghao/Downloads/qqwry.dat";
+    static IPLocation ipLocation;
+    static {
+        try {
+            ipLocation = new IPLocation(filePath);
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+
+    public static String getLocation(String ip) {
+        Location loc = ipLocation.fetchIPLocation(ip);
+        return loc.toString();
+    }
+}

+ 19 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/ip/Location.java

@@ -0,0 +1,19 @@
+package cn.reghao.tnb.search.app.ip;
/**
 * Simple mutable holder for an IP-lookup result: country and area strings
 * decoded from the QQWry database.
 *
 * @author difeng
 * @date 2016-12-13
 */
public class Location {

    /** Country (or ISP block) name. */
    public String country;

    /** Area / region name; may be empty when the record has none. */
    public String area;

    @Override
    public String toString() {
        return String.format("Location [country=%s, area=%s]", country, area);
    }

}
+

+ 60 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/lucene/LuceneDocument.java

@@ -0,0 +1,60 @@
+package cn.reghao.tnb.search.app.lucene;
+
+import cn.reghao.tnb.search.app.model.po.VideoText;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.document.TextField;
+import org.springframework.stereotype.Service;
+
+import java.lang.reflect.Field;
+
+/**
+ * @author reghao
+ * @date 2025-04-02 11:12:48
+ */
+@Service
+public class LuceneDocument {
+    public Object getObject(Class<?> clazz, Document document) throws Exception {
+        Field[] fields = clazz.getDeclaredFields();
+        Object object = clazz.getDeclaredConstructor().newInstance();
+        for (int i = 0; i < fields.length; i++) {
+            String name = fields[i].getName();
+            String value = document.get(name);
+            if (value == null) {
+                continue;
+            }
+
+            Class<?> clazzType = fields[i].getType();
+            if (clazzType.equals(String.class)) {
+                fields[i].setAccessible(true);
+                fields[i].set(object, value);
+            } else {
+                Object result = null;
+                if (clazzType.equals(Long.class)) {
+                    result = Long.parseLong(value);
+                } else if (clazzType.equals(Double.class)) {
+                    result = Double.parseDouble(value);
+                } else if (clazzType.equals(Integer.class)) {
+                    result = Integer.parseInt(value);
+                } else if (clazzType.equals(Boolean.class)) {
+                    result = Boolean.parseBoolean(value);
+                }
+
+                fields[i].setAccessible(true);
+                fields[i].set(object, result);
+            }
+        }
+
+        return object;
+    }
+
+    public Document getVideoTextDoc(VideoText videoText) {
+        Document doc = new Document();
+        doc.add(new StringField("id", videoText.getId(), org.apache.lucene.document.Field.Store.YES));
+        doc.add(new StringField("videoId", videoText.getVideoId(), org.apache.lucene.document.Field.Store.YES));
+        doc.add(new TextField("title", videoText.getTitle(), org.apache.lucene.document.Field.Store.YES));
+        doc.add(new TextField("description", videoText.getDescription(), org.apache.lucene.document.Field.Store.NO));
+        doc.add(new StringField("vip", ""+videoText.getVip(), org.apache.lucene.document.Field.Store.YES));
+        return doc;
+    }
+}

+ 160 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/lucene/LuceneIndex.java

@@ -0,0 +1,160 @@
+package cn.reghao.tnb.search.app.lucene;
+
+import cn.reghao.tnb.search.app.model.po.VideoText;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.*;
+import org.apache.lucene.search.*;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.springframework.stereotype.Service;
+import org.wltea.analyzer.lucene.IKAnalyzer;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Paths;
+import java.util.List;
+
+/**
+ * @author reghao
+ * @date 2023-03-02 09:28:58
+ */
+@Service
+public class LuceneIndex {
+    private final LuceneDocument luceneDocument;
+    private final String indexDirPath;
+    private final Analyzer luceneAnalyzer = new IKAnalyzer();
+    private final Directory directory;
+    private final IndexWriter indexWriter;
+
+    public LuceneIndex(LuceneDocument luceneDocument) throws IOException {
+        this.luceneDocument = luceneDocument;
+        this.indexDirPath = "/opt/data/bntdata/jsearch";
+        this.directory = FSDirectory.open(Paths.get(indexDirPath));
+        this.indexWriter = getIndexWriter();
+    }
+
+    public IndexWriter getIndexWriter() throws IOException {
+        File indexDir = new File(indexDirPath);
+        Directory indexDirectory = FSDirectory.open(indexDir.toPath());
+
+        IndexWriterConfig indexWriterConfig = new IndexWriterConfig(luceneAnalyzer);
+        return new IndexWriter(indexDirectory, indexWriterConfig);
+    }
+
+    private IndexReader getIndexReader() throws IOException {
+        Directory directory = FSDirectory.open(Paths.get(indexDirPath));
+        IndexReader indexReader = DirectoryReader.open(directory);
+        return indexReader;
+    }
+
+    public void createVideoTextIndex(VideoText videoText) throws IOException {
+        Document document = luceneDocument.getVideoTextDoc(videoText);
+        indexWriter.addDocument(document);
+        indexWriter.commit();
+    }
+
+    public void updateIndex(String videoId, Document document) throws IOException {
+        if (document != null) {
+            // Lucene 没有提供更新接口, 更新操作实际是先删除后再添加
+            indexWriter.updateDocument(new Term("videoId", videoId), document);
+            indexWriter.commit();
+        }
+    }
+
+    public Document getDocument(String videoId) throws IOException {
+        IndexReader indexReader = DirectoryReader.open(directory);
+        Query query = new TermQuery(new Term("videoId", videoId));
+        IndexSearcher indexSearcher = new IndexSearcher(indexReader);
+        TopDocs topDocs = indexSearcher.search(query, 1);
+        ScoreDoc[] scoreDocs = topDocs.scoreDocs;
+        if (scoreDocs.length == 1) {
+            Document document = indexReader.document(scoreDocs[0].doc);
+            return document;
+        }
+
+        return null;
+    }
+
+    public void deleteIndex(String videoId) throws IOException {
+        indexWriter.deleteDocuments(new Term("videoId", videoId));
+        indexWriter.commit();
+    }
+
+    public void createIndex(Document document) throws IOException {
+        IndexWriter indexWriter = getIndexWriter();
+        indexWriter.addDocument(document);
+        indexWriter.commit();
+        indexWriter.close();
+    }
+
+    public void createIndex(List<Document> list) throws IOException {
+        IndexWriter indexWriter = getIndexWriter();
+        list.forEach(doc -> {
+            try {
+                indexWriter.addDocument(doc);
+            } catch (IOException e) {
+                e.printStackTrace();
+            }
+        });
+
+        indexWriter.commit();
+        indexWriter.close();
+    }
+
+    public void updateIndex(Document document) throws IOException {
+        Directory directory = FSDirectory.open(Paths.get(indexDirPath));
+        IndexWriterConfig config = new IndexWriterConfig(luceneAnalyzer);
+        IndexWriter indexWriter = new IndexWriter(directory, config);
+
+        indexWriter.updateDocument(new Term("name", "李白"), document);
+        indexWriter.commit();
+        indexWriter.close();
+    }
+
+    public void deleteIndex() throws IOException {
+        Directory directory = FSDirectory.open(Paths.get(indexDirPath));
+        Analyzer analyzer = new IKAnalyzer();
+        IndexWriterConfig config = new IndexWriterConfig(analyzer);
+        IndexWriter indexWriter = new IndexWriter(directory, config);
+
+        indexWriter.deleteDocuments(new Term("name", "李白"));
+        indexWriter.commit();
+        indexWriter.close();
+    }
+
+    public void deleteAllIndex() throws IOException {
+        Directory directory = FSDirectory.open(Paths.get(indexDirPath));
+        Analyzer analyzer = new IKAnalyzer();
+        IndexWriterConfig config = new IndexWriterConfig(analyzer);
+        IndexWriter indexWriter = new IndexWriter(directory, config);
+
+        indexWriter.deleteAll();
+        indexWriter.commit();
+        indexWriter.close();
+    }
+
+    public void check() throws IOException {
+        IndexReader indexReader = DirectoryReader.open(directory);
+        // 总共的索引文档
+        int maxDocs = indexReader.maxDoc();
+        // 有效的索引文档
+        int numDocs = indexReader.numDocs();
+        // 被标记删除的索引文档
+        int numDeletedDocs = indexReader.numDeletedDocs();
+    }
+
+    public void restore() {
+    }
+
+    /**
+     * 彻底删除文档
+     *
+     * @param
+     * @return
+     * @date 2025-04-02 11:31:44
+     */
+    public void forceDelete() throws IOException {
+        indexWriter.forceMergeDeletes();
+    }
+}

+ 323 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/lucene/LuceneQuery.java

@@ -0,0 +1,323 @@
+package cn.reghao.tnb.search.app.lucene;
+
+import cn.reghao.tnb.search.app.model.po.VideoText;
+import cn.reghao.tnb.search.app.model.vo.SearchResult;
+import cn.reghao.jutil.jdk.db.PageList;
+import cn.reghao.tnb.search.app.model.vo.VideoCard;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.queryparser.classic.MultiFieldQueryParser;
+import org.apache.lucene.queryparser.classic.ParseException;
+import org.apache.lucene.queryparser.classic.QueryParser;
+import org.apache.lucene.search.*;
+import org.apache.lucene.search.highlight.Formatter;
+import org.apache.lucene.search.highlight.*;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.PageImpl;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.stereotype.Service;
+import org.wltea.analyzer.lucene.IKAnalyzer;
+
+import java.io.IOException;
+import java.nio.file.Paths;
+import java.util.*;
+
+/**
+ * @author reghao
+ * @date 2023-03-02 09:29:04
+ */
+@Slf4j
+@Service
+public class LuceneQuery {
+    private final static String indexDirPath = "/opt/data/bntdata/jsearch";
+    private final Analyzer luceneAnalyzer;
+    private final SimpleHTMLFormatter formatter;
+    private final Directory directory;
+
+    public LuceneQuery() throws IOException {
+        this.luceneAnalyzer = new IKAnalyzer();
+        this.formatter = new SimpleHTMLFormatter("<span style='color:red;'>", "</span>");
+        this.directory = FSDirectory.open(Paths.get(indexDirPath));
+    }
+
+    private IndexReader getIndexReader() throws IOException {
+        Directory directory = FSDirectory.open(Paths.get(indexDirPath));
+        IndexReader indexReader = DirectoryReader.open(directory);
+        return indexReader;
+    }
+
+    public Page<VideoText> queryWithHighlight(String index, String queryString, Integer pn, Integer ps) {
+        try {
+            IndexReader indexReader = DirectoryReader.open(directory);
+            IndexSearcher indexSearcher = new IndexSearcher(indexReader);
+            String field = "title";
+            QueryParser queryParser = new QueryParser(field, luceneAnalyzer);
+            Query query = queryParser.parse(queryString);
+
+            long total;
+            TopDocs topDocs;
+            if (pn == 1) {
+                topDocs = indexSearcher.search(query, ps);
+                //topDocs = indexSearcher.searchAfter(null, query, pageSize);
+                total = topDocs.totalHits.value;
+            } else {
+                int count = (pn-1)*ps;
+                TopDocs prevTopDocs = indexSearcher.searchAfter(null, query, count);
+                total = prevTopDocs.totalHits.value;
+
+                ScoreDoc[] prevScoreDocs = prevTopDocs.scoreDocs;
+                ScoreDoc after = prevScoreDocs[prevScoreDocs.length-1];
+                topDocs = indexSearcher.searchAfter(after, query, ps);
+            }
+
+            List<VideoText> list = new ArrayList<>();
+            ScoreDoc[] scoreDocs = topDocs.scoreDocs;
+            QueryScorer queryScorer = new QueryScorer(query);
+            Highlighter highlighter = new Highlighter(formatter, queryScorer);
+            for (ScoreDoc scoreDoc : scoreDocs) {
+                Document document = indexReader.document(scoreDoc.doc);
+                String videoId = document.get("videoId");
+                String title = document.get("title");
+                String htmlTitle = highlighter.getBestFragment(luceneAnalyzer, field, title);
+                boolean vip = Boolean.parseBoolean(document.get("vip"));
+                list.add(new VideoText(videoId, htmlTitle, vip));
+            }
+
+            PageRequest pageRequest = PageRequest.of(pn-1, ps);
+            return new PageImpl<>(list, pageRequest, total);
+        } catch (IOException | ParseException | InvalidTokenOffsetsException e) {
+            e.printStackTrace();
+        }
+
+        return Page.empty();
+    }
+
+    public Document getDocument(String videoId) {
+        try {
+            IndexReader indexReader = DirectoryReader.open(directory);
+            IndexSearcher indexSearcher = new IndexSearcher(indexReader);
+
+            Query query = new TermQuery(new Term("videoId", videoId));
+            TopDocs topDocs = indexSearcher.search(query, 1);
+            ScoreDoc[] scoreDocs = topDocs.scoreDocs;
+            if (scoreDocs.length == 1) {
+                Document document = indexReader.document(scoreDocs[0].doc);
+                return document;
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        return null;
+    }
+
+    public PageList<VideoCard> searchByTitle(String keyword, int pageSize, int pageNumber) {
+        try {
+            SearchResult searchResult = search(keyword, pageSize, pageNumber);
+            long total = searchResult.getTotal();
+            Map<String, String> result = searchResult.getResult();
+            Set<String> videoIds = result.keySet();
+            if (!videoIds.isEmpty()) {
+                /*List<VideoCard> list = redisHash.multiGet("video:card:hash", videoIds);
+                return PageList.pageList(pageNumber, pageSize, (int) total, list);*/
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        return PageList.empty();
+    }
+
+    public SearchResult search(String keyword, int pageSize, int pageNumber)
+            throws IOException, InvalidTokenOffsetsException, ParseException {
+        Directory directory = FSDirectory.open(Paths.get(indexDirPath));
+        IndexReader indexReader = DirectoryReader.open(directory);
+        IndexSearcher indexSearcher = new IndexSearcher(indexReader);
+
+        String field = "title";
+        Analyzer analyzer = new StandardAnalyzer();
+        QueryParser queryParser = new QueryParser(field, analyzer);
+        Query query = queryParser.parse(keyword);
+        Query query1 = new TermQuery(new Term(field, keyword));
+
+        long total;
+        TopDocs topDocs;
+        if (pageNumber == 1) {
+            // topDocs = indexSearcher.search(query, pageSize);
+            topDocs = indexSearcher.searchAfter(null, query, pageSize);
+            total = topDocs.totalHits.value;
+        } else {
+            int count = (pageNumber-1)*pageSize;
+            TopDocs prevTopDocs = indexSearcher.searchAfter(null, query, count);
+            total = prevTopDocs.totalHits.value;
+
+            ScoreDoc[] prevScoreDocs = prevTopDocs.scoreDocs;
+            ScoreDoc after = prevScoreDocs[prevScoreDocs.length-1];
+            topDocs = indexSearcher.searchAfter(after, query, pageSize);
+        }
+
+        ScoreDoc[] scoreDocs = topDocs.scoreDocs;
+        Map<String, String> map = new HashMap<>();
+        Formatter formatter = new SimpleHTMLFormatter("<em>", "</em>");
+        QueryScorer queryScorer = new QueryScorer(query);
+        Highlighter highlighter = new Highlighter(formatter, queryScorer);
+        for (ScoreDoc scoreDoc : scoreDocs) {
+            Document document = indexReader.document(scoreDoc.doc);
+            String videoId = document.get("videoId");
+            String title = document.get("title");
+            String htmlTitle = highlighter.getBestFragment(analyzer, field, title);
+            map.put(videoId, htmlTitle);
+        }
+        return new SearchResult(total, map);
+    }
+
+    public SearchResult highlighter(String keyword, int pageSize, int pageNumber)
+            throws IOException, InvalidTokenOffsetsException, ParseException {
+        Directory directory = FSDirectory.open(Paths.get(indexDirPath));
+        IndexReader indexReader = DirectoryReader.open(directory);
+        IndexSearcher indexSearcher = new IndexSearcher(indexReader);
+
+        String field = "title";
+        Analyzer analyzer = new IKAnalyzer();
+
+        QueryParser queryParser = new QueryParser(field, analyzer);
+        Query query = queryParser.parse(keyword);
+        TermQuery termQuery = new TermQuery(new Term(field, keyword));
+        FuzzyQuery fuzzyQuery = new FuzzyQuery(new Term(field, keyword), 1);
+        PhraseQuery.Builder builder = new PhraseQuery.Builder();
+        builder.add(new Term(field, keyword), 1);
+        PhraseQuery phraseQuery = builder.build();
+
+        long total;
+        TopDocs topDocs;
+        if (pageNumber == 1) {
+            topDocs = indexSearcher.search(termQuery, pageSize);
+            //topDocs = indexSearcher.searchAfter(null, query, pageSize);
+            total = topDocs.totalHits.value;
+        } else {
+            int count = (pageNumber-1)*pageSize;
+            TopDocs prevTopDocs = indexSearcher.searchAfter(null, query, count);
+            total = prevTopDocs.totalHits.value;
+
+            ScoreDoc[] prevScoreDocs = prevTopDocs.scoreDocs;
+            ScoreDoc after = prevScoreDocs[prevScoreDocs.length-1];
+            topDocs = indexSearcher.searchAfter(after, query, pageSize);
+        }
+
+        ScoreDoc[] scoreDocs = topDocs.scoreDocs;
+        Map<String, String> map = new HashMap<>();
+        SimpleHTMLFormatter formatter = new SimpleHTMLFormatter("<span style='color:red;'>", "</span>");
+        QueryScorer queryScorer = new QueryScorer(query);
+        Highlighter highlighter = new Highlighter(formatter, queryScorer);
+        for (ScoreDoc scoreDoc : scoreDocs) {
+            Document document = indexReader.document(scoreDoc.doc);
+            String videoId = document.get("videoId");
+            String title = document.get("title");
+            String htmlTitle = highlighter.getBestFragment(analyzer, field, title);
+            map.put(videoId, htmlTitle);
+        }
+        return new SearchResult(total, map);
+    }
+
+    static void query(Query query) {
+        try {
+            Directory directory = FSDirectory.open(Paths.get(indexDirPath));
+            IndexReader indexReader = DirectoryReader.open(directory);
+            IndexSearcher indexSearcher = new IndexSearcher(indexReader);
+
+            // 查询前 100 条数据
+            TopDocs topDocs = indexSearcher.search(query, 100);
+            log.info("本次搜索共找到" + topDocs.totalHits.value + "条数据");
+            ScoreDoc[] scoreDocs = topDocs.scoreDocs;
+            for (ScoreDoc scoreDoc : scoreDocs) {
+                Document document = indexReader.document(scoreDoc.doc);
+                log.info(document.toString());
+                //log.info("id={},name={},poems={},success={},score={}", document.get("id"), document.get("name"), document.get("poems"), document.get("success"), scoreDoc.score);
+            }
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+    }
+
+    static void termQuery() {
+        Query query = new TermQuery(new Term("name", "李白"));
+        query(query);
+    }
+
+    static PrefixQuery prefixQuery(String field, String startWith) {
+        return new PrefixQuery(new Term(field, startWith));
+    }
+
+    static void prefixQuery1(String field, String startWith) {
+        Query query1 = new TermQuery(new Term("tableName", "RujiaHotel"));
+        Query query2 = new PrefixQuery(new Term(field, startWith));
+
+        BooleanQuery.Builder builder = new BooleanQuery.Builder();
+        builder.add(query1, BooleanClause.Occur.MUST);
+        builder.add(query2, BooleanClause.Occur.MUST);
+        BooleanQuery booleanQuery = builder.build();
+        query(booleanQuery);
+    }
+
+    static void wildcardQuery() {
+        Query query = new WildcardQuery(new Term("name", "李?"));
+        query(query);
+    }
+
+    static void fuzzyQuery() {
+        //"李百"->"李白",只需修改一次,故可以搜索到数据;"里百"则搜索不到数据
+        Query query = new FuzzyQuery(new Term("name", "里百"), 1);
+        query(query);
+    }
+
+    static void numberQuery() {
+        //精确查询
+        Query query = LongPoint.newExactQuery("id", 123456);
+        query(query);
+
+        //范围查询
+        query = LongPoint.newRangeQuery("id", 123L, 12345678L);
+        query(query);
+    }
+
+    static void booleanQuery() {
+        Query query1 = new TermQuery(new Term("name", "李白"));
+        Query query2 = new TermQuery(new Term("name", "杜甫"));
+        BooleanQuery.Builder builder = new BooleanQuery.Builder();
+        builder.add(query1, BooleanClause.Occur.SHOULD);
+        builder.add(query2, BooleanClause.Occur.SHOULD);
+        BooleanQuery booleanQuery = builder.build();
+        query(booleanQuery);
+    }
+
+    static void boostQuery() {
+        Query query1 = new BoostQuery(new TermQuery(new Term("name", "李白")), 1.5f);
+        Query query2 = new BoostQuery(new TermQuery(new Term("name", "杜甫")), 1.6f);
+        BooleanQuery.Builder builder = new BooleanQuery.Builder();
+        builder.add(query1, BooleanClause.Occur.SHOULD);
+        builder.add(query2, BooleanClause.Occur.SHOULD);
+        BooleanQuery booleanQuery = builder.build();
+
+        query(booleanQuery);
+    }
+
+    static void queryParserQuery() throws ParseException {
+        IKAnalyzer ikAnalyzer = new IKAnalyzer();
+        QueryParser queryParser = new QueryParser("name", ikAnalyzer);
+        Query query = queryParser.parse("李白和杜甫");
+        query(query);
+
+        //多字段查询
+        MultiFieldQueryParser multiFieldQueryParser = new MultiFieldQueryParser(new String[]{"name", "about"}, ikAnalyzer);
+        query = multiFieldQueryParser.parse("李白和子美");
+        query(query);
+    }
+}

+ 33 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/model/po/NginxLog.java

@@ -0,0 +1,33 @@
+package cn.reghao.tnb.search.app.model.po;
+
+import com.google.gson.annotations.SerializedName;
+import lombok.Getter;
+import lombok.Setter;
+
+import java.io.Serializable;
+
+/**
+ * Nginx access-log record shipped as JSON; JSON keys are mapped from the nginx
+ * log variable names via Gson's {@code @SerializedName}.
+ *
+ * @author reghao
+ * @date 2023-11-07 14:58:07
+ */
+@Setter
+@Getter
+public class NginxLog implements Serializable {
+    private static final long serialVersionUID = 1L;
+
+    // record id — presumably assigned by the indexer, not part of the log line; confirm
+    private String id;
+    @SerializedName("time_iso8601") private String timeIso8601;
+    @SerializedName("remote_addr") private String remoteAddr;
+    // raw request line ("METHOD /path HTTP/x.y")
+    private String request;
+    // HTTP response status code
+    private Integer status;
+    @SerializedName("request_method") private String requestMethod;
+    @SerializedName("body_bytes_sent") private Integer bodyBytesSent;
+    // request processing time in seconds
+    @SerializedName("request_time") private Double requestTime;
+    // kept as String: nginx may emit "-" or comma-separated values for multiple upstreams
+    @SerializedName("upstream_response_time") private String upstreamResponseTime;
+    @SerializedName("upstream_addr") private String upstreamAddr;
+    private String host;
+    private String url;
+    @SerializedName("http_x_forwarded_for") private String httpXForwardedFor;
+    @SerializedName("http_referer") private String httpReferer;
+    @SerializedName("http_user_agent") private String httpUserAgent;
+}

+ 35 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/model/po/VideoText.java

@@ -0,0 +1,35 @@
+package cn.reghao.tnb.search.app.model.po;
+
+import cn.reghao.tnb.search.api.dto.VideoSummary;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+import lombok.Setter;
+
+/**
+ * Searchable text fields of a video, stored as a search-engine document.
+ *
+ * @author reghao
+ * @date 2025-04-02 09:19:53
+ */
+@NoArgsConstructor
+@Setter
+@Getter
+public class VideoText {
+    // document id in the search index
+    private String id;
+    private String videoId;
+    private String title;
+    private String description;
+    // VIP flag — presumably marks member-only videos; confirm semantics
+    private Boolean vip;
+
+    /** Builds an index document from the RPC DTO. */
+    public VideoText(VideoSummary videoSummary) {
+        this.videoId = videoSummary.getVideoId();
+        this.title = videoSummary.getTitle();
+        this.description = videoSummary.getDescription();
+        this.vip = videoSummary.getVip();
+    }
+
+    /**
+     * Builds a result-view document from a search hit.
+     *
+     * <p>NOTE(review): description is filled with the title here — presumably
+     * because only the title is stored/highlighted on this path; confirm.</p>
+     */
+    public VideoText(String videoId, String title, Boolean vip) {
+        this.videoId = videoId;
+        this.title = title;
+        this.description = title;
+        this.vip = vip;
+    }
+}

+ 42 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/model/po/Wenshu.java

@@ -0,0 +1,42 @@
+package cn.reghao.tnb.search.app.model.po;
+
+import lombok.Getter;
+import lombok.Setter;
+
+/**
+ * Court judgment document (裁判文书) model used for search indexing.
+ *
+ * @author reghao
+ * @date 2025-03-18 09:50:13
+ */
+@Setter
+@Getter
+public class Wenshu {
+    private String id;
+    // original source URL
+    private String originalUrl;
+    // case number
+    private String caseId;
+    // case name
+    private String caseName;
+    // court
+    private String court;
+    // region
+    private String region;
+    // case type
+    private String caseType;
+    // case type code
+    private String caseTypeId;
+    // trial procedure
+    private String procedure;
+    // judgment date
+    private String judgmentDate;
+    // publication date
+    private String publicDate;
+    // parties involved
+    private String parties;
+    // cause of action
+    private String cause;
+    // legal basis
+    private String legalBasis;
+    // full text of the judgment
+    private String fullText;
+}

+ 17 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/model/vo/SearchResult.java

@@ -0,0 +1,17 @@
+package cn.reghao.tnb.search.app.model.vo;
+
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+
+import java.util.Map;
+
+/**
+ * Result of a highlighted search: the total hit count plus a map of
+ * videoId -&gt; highlighted title fragment for the current page.
+ *
+ * @author reghao
+ * @date 2023-05-01 20:15:24
+ */
+@AllArgsConstructor
+@Getter
+public class SearchResult {
+    // total number of matching documents across all pages
+    private long total;
+    // videoId -> highlighted (HTML) title for the current page
+    private Map<String, String> result;
+}

+ 42 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/model/vo/VideoCard.java

@@ -0,0 +1,42 @@
+package cn.reghao.tnb.search.app.model.vo;
+
+import cn.reghao.tnb.content.api.dto.VideoPostCard;
+import cn.reghao.tnb.user.api.dto.UserCard;
+import lombok.Getter;
+import lombok.Setter;
+
+import java.io.Serializable;
+
+/**
+ * View object for a video card in result lists; combines the post card, the
+ * author card, and display strings pre-formatted by the caller.
+ *
+ * @author reghao
+ * @date 2023-04-24 03:07:22
+ */
+@Setter
+@Getter
+public class VideoCard implements Serializable {
+    private static final long serialVersionUID = 1L;
+
+    private String videoId;
+    private String title;
+    private String coverUrl;
+    // display-ready duration string supplied by the caller
+    private String duration;
+    private boolean horizontal;
+    // cached flag — semantics defined by the caller; confirm
+    private boolean cached;
+    // display-ready publish-date string supplied by the caller
+    private String pubDateStr;
+    // author of the video
+    private UserCard user;
+    // view count
+    private int view;
+    // comment count
+    private int comment;
+
+    /**
+     * @param videoPostCard source post data
+     * @param durationStr   display-ready duration string
+     * @param cached        whether the video is cached
+     * @param pubDateStr    display-ready publish date
+     * @param user          author card
+     */
+    public VideoCard(VideoPostCard videoPostCard, String durationStr, boolean cached, String pubDateStr, UserCard user) {
+        this.videoId = videoPostCard.getVideoId();
+        this.title = videoPostCard.getTitle();
+        this.coverUrl = videoPostCard.getCoverUrl();
+        this.duration = durationStr;
+        this.horizontal = videoPostCard.isHorizontal();
+        this.cached = cached;
+        this.pubDateStr = pubDateStr;
+        this.user = user;
+        this.view = videoPostCard.getView();
+        this.comment = videoPostCard.getComment();
+    }
+}

+ 19 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/model/vo/VideoProjection.java

@@ -0,0 +1,19 @@
+package cn.reghao.tnb.search.app.model.vo;
+
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+
+import java.util.List;
+
+/**
+ * Projection of a search hit: the video id plus its highlighted title fragments.
+ *
+ * @author reghao
+ * @date 2025-04-02 09:39:36
+ */
+@AllArgsConstructor
+@NoArgsConstructor
+@Getter
+public class VideoProjection {
+    private String videoId;
+    // highlighted title fragments returned by the search engine (one per fragment)
+    private List<String> highlightTitle;
+}

+ 130 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/rpc/DataSearchServiceImpl.java

@@ -0,0 +1,130 @@
+package cn.reghao.tnb.search.app.rpc;
+
+import cn.reghao.tnb.search.api.dto.VideoSummary;
+import cn.reghao.tnb.search.api.iface.DataSearchService;
+import cn.reghao.tnb.search.app.es.DocumentService;
+import cn.reghao.tnb.search.app.es.ElasticService;
+import cn.reghao.tnb.search.app.es.QueryService;
+import cn.reghao.tnb.search.app.lucene.LuceneIndex;
+import cn.reghao.tnb.search.app.lucene.LuceneQuery;
+import cn.reghao.tnb.search.app.model.po.VideoText;
+import cn.reghao.jutil.jdk.db.PageList;
+import co.elastic.clients.elasticsearch._types.FieldValue;
+import co.elastic.clients.elasticsearch._types.query_dsl.BoolQuery;
+import co.elastic.clients.elasticsearch._types.query_dsl.Query;
+import co.elastic.clients.elasticsearch._types.query_dsl.QueryStringQuery;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.dubbo.config.annotation.DubboService;
+import org.springframework.data.domain.Page;
+import org.springframework.stereotype.Service;
+import org.wltea.analyzer.core.IKSegmenter;
+import org.wltea.analyzer.core.Lexeme;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Locale;
+import java.util.stream.Collectors;
+
+/**
+ * Dubbo-exposed search RPC backed by Elasticsearch (with a Lucene fallback path).
+ *
+ * @author reghao
+ * @date 2025-03-27 13:55:21
+ */
+@Slf4j
+@DubboService
+@Service
+public class DataSearchServiceImpl implements DataSearchService {
+    /** ES index name derived from the document class name ("videotext"). */
+    private final String index = VideoText.class.getSimpleName().toLowerCase(Locale.ROOT);
+    /** Fixed page size for search results. */
+    private final int ps = 20;
+    private final DocumentService documentService;
+    private final LuceneIndex luceneIndex;
+    private final QueryService<VideoText> esQuery;
+    private final LuceneQuery luceneQuery;
+
+    public DataSearchServiceImpl(ElasticService elasticService, LuceneQuery luceneQuery, LuceneIndex luceneIndex) {
+        this.esQuery = new QueryService<>(elasticService);
+        this.documentService = new DocumentService(elasticService);
+        this.luceneQuery = luceneQuery;
+        this.luceneIndex = luceneIndex;
+    }
+
+    /**
+     * Indexes a video summary.
+     *
+     * <p>NOTE(review): the backend is hard-coded to Elasticsearch (type == 1), so
+     * the Lucene branch is dead code — make it configurable or remove it.</p>
+     */
+    public void addVideoSummary(VideoSummary videoSummary) {
+        int type = 1; // 1 = Elasticsearch, otherwise local Lucene index
+        try {
+            VideoText videoText = new VideoText(videoSummary);
+            if (type == 1) {
+                documentService.addVideoText(index, videoText);
+            } else {
+                luceneIndex.createVideoTextIndex(videoText);
+            }
+        } catch (IOException e) {
+            // swallow deliberately so an indexing failure does not break the caller
+            log.error("failed to index video summary, videoId={}", videoSummary.getVideoId(), e);
+        }
+    }
+
+    /** Not implemented yet. */
+    @Override
+    public void updateVideoSummary(VideoSummary videoSummary) {
+    }
+
+    /** Not implemented yet. */
+    @Override
+    public void deleteVideoSummary(String videoId) {
+    }
+
+    /**
+     * Searches videos by title.
+     *
+     * @param keyword   user query (query_string syntax)
+     * @param nextIdStr parsed as a boolean VIP filter — NOTE(review): the name
+     *                  suggests a pagination cursor; confirm the intended contract
+     * @param pn        1-based page number
+     * @return one page (size {@link #ps}) of matching summaries
+     */
+    @Override
+    public PageList<VideoSummary> searchVideo(String keyword, String nextIdStr, int pn) {
+        QueryStringQuery stringQuery = new QueryStringQuery.Builder()
+                // restrict the query to the title field
+                .fields("title")
+                .query(keyword)
+                .build();
+
+        boolean vipOnly = Boolean.parseBoolean(nextIdStr);
+        Query query;
+        if (!vipOnly) {
+            query = new Query.Builder()
+                    .queryString(stringQuery)
+                    .build();
+        } else {
+            // vip == true AND the title query
+            query = new Query.Builder()
+                    .bool(BoolQuery.of(b -> b
+                            .must(m -> m.term(t -> t.field("vip").value(FieldValue.of(vipOnly))))
+                            .must(m -> m.queryString(stringQuery))))
+                    .build();
+        }
+
+        Page<VideoText> page = esQuery.queryWithHighlight(index, query, pn, ps, VideoText.class);
+        List<VideoSummary> list = page.stream()
+                .map(videoText -> new VideoSummary(videoText.getVideoId(), videoText.getTitle(),
+                        videoText.getDescription(), videoText.getVip()))
+                .collect(Collectors.toList());
+        return PageList.pageList(pn, ps, (int) page.getTotalElements(), list);
+    }
+
+    /**
+     * Tokenizes text with the IK segmenter (smart mode).
+     *
+     * @param text input text
+     * @return the token list; empty on tokenizer failure
+     * @date 2025-03-31 15:35:41
+     */
+    public List<String> segment(String text) {
+        List<String> words = new ArrayList<>();
+        try (StringReader reader = new StringReader(text)) {
+            IKSegmenter segmenter = new IKSegmenter(reader, true);
+            Lexeme lexeme;
+            while ((lexeme = segmenter.next()) != null) {
+                words.add(lexeme.getLexemeText());
+            }
+        } catch (IOException e) {
+            log.error("failed to segment text", e);
+        }
+
+        return words;
+    }
+}

+ 38 - 0
search/search-service/src/main/java/cn/reghao/tnb/search/app/util/ClassUtil.java

@@ -0,0 +1,38 @@
+package cn.reghao.tnb.search.app.util;
+
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Reflection helper that maps an array of string values onto the declared fields
+ * of a class, converting each value to the field's type.
+ *
+ * @author reghao
+ * @date 2023-04-05 16:50:42
+ */
+public final class ClassUtil {
+    private ClassUtil() {
+        // static utility; no instances
+    }
+
+    /**
+     * Instantiates {@code clazz} via its no-arg constructor and assigns
+     * {@code strs[i]} to the i-th non-static declared field, converting to
+     * String/Long/Integer/Boolean and falling back to Double for other types.
+     *
+     * <p>NOTE(review): relies on {@code getDeclaredFields()} returning fields in
+     * declaration order, which the JLS does not guarantee (HotSpot does in
+     * practice) — confirm this is acceptable.</p>
+     *
+     * @param clazz target class with an accessible no-arg constructor
+     * @param strs  one value per mappable field, in declaration order
+     * @return the populated object, or {@code null} when the value count does not
+     *         match the field count
+     * @throws Exception on reflection or number-format failures
+     */
+    public static Object getObject(Class<?> clazz, String[] strs) throws Exception {
+        // skip static/synthetic fields (e.g. serialVersionUID) so they cannot
+        // shift the value-to-field alignment
+        List<Field> fields = new ArrayList<>();
+        for (Field field : clazz.getDeclaredFields()) {
+            if (!Modifier.isStatic(field.getModifiers()) && !field.isSynthetic()) {
+                fields.add(field);
+            }
+        }
+        if (strs.length != fields.size()) {
+            return null;
+        }
+
+        Object object = clazz.getDeclaredConstructor().newInstance();
+        for (int i = 0; i < fields.size(); i++) {
+            Field field = fields.get(i);
+            Class<?> type = field.getType();
+            Object value;
+            if (type.equals(String.class)) {
+                value = strs[i];
+            } else if (type.equals(Long.class)) {
+                value = Long.parseLong(strs[i]);
+            } else if (type.equals(Integer.class)) {
+                value = Integer.parseInt(strs[i]);
+            } else if (type.equals(Boolean.class)) {
+                value = Boolean.parseBoolean(strs[i]);
+            } else {
+                // last-resort conversion; non-numeric types will throw here
+                value = Double.parseDouble(strs[i]);
+            }
+            field.setAccessible(true);
+            field.set(object, value);
+        }
+        return object;
+    }
+}

+ 21 - 0
search/search-service/src/main/resources/application-cluster.yml

@@ -0,0 +1,21 @@
+log:
+  enabled: false
+  ws-url: ws://bnt.reghao.cn/ws/log/push
+  token: 012345678
+dubbo:
+  registry:
+    address: zookeeper://localhost:2181
+spring:
+  cloud:
+    discovery:
+      enabled: true
+  data:
+    # redis nested under spring.data (Spring Boot 3 namespace) — previously an
+    # empty data: key with redis: as a sibling; confirm against the Boot version
+    redis:
+      database: 0
+      host: localhost
+      port: 6379
+      password: Dev@123456
+eureka:
+  client:
+    service-url:
+      defaultZone: http://localhost:6060/eureka/

+ 21 - 0
search/search-service/src/main/resources/application-dev.yml

@@ -0,0 +1,21 @@
+log:
+  enabled: false
+  ws-url: ws://bnt.reghao.cn/ws/log/push
+  token: 012345678
+dubbo:
+  registry:
+    address: zookeeper://localhost:2181
+spring:
+  cloud:
+    discovery:
+      enabled: true
+  data:
+    # redis nested under spring.data (Spring Boot 3 namespace) — previously an
+    # empty data: key with redis: as a sibling; confirm against the Boot version
+    redis:
+      database: 0
+      host: localhost
+      port: 6379
+      password: Dev@123456
+eureka:
+  client:
+    service-url:
+      defaultZone: http://localhost:6060/eureka/

+ 21 - 0
search/search-service/src/main/resources/application-test.yml

@@ -0,0 +1,21 @@
+log:
+  enabled: false
+  ws-url: ws://bnt.reghao.cn/ws/log/push
+  token: 012345678
+dubbo:
+  registry:
+    address: zookeeper://localhost:2181
+spring:
+  cloud:
+    discovery:
+      enabled: true
+  data:
+    # redis nested under spring.data (Spring Boot 3 namespace) — previously an
+    # empty data: key with redis: as a sibling; confirm against the Boot version
+    redis:
+      database: 0
+      host: localhost
+      port: 6379
+      password: Dev@123456
+eureka:
+  client:
+    service-url:
+      defaultZone: http://localhost:6060/eureka/

+ 38 - 0
search/search-service/src/main/resources/application.yml

@@ -0,0 +1,38 @@
+dubbo:
+  application:
+    name: search-service
+    qos-enable: true
+    qos-port: 16107
+    qos-accept-foreign-ip: false
+  scan:
+    base-packages: cn.reghao.tnb.search.app.rpc
+  protocol:
+    name: dubbo
+    port: 6107
+server:
+  port: 6007
+  tomcat:
+    max-http-form-post-size: 4MB
+spring:
+  application:
+    name: search-service
+  profiles:
+    active: @profile.active@
+  datasource:
+    driver-class-name: com.mysql.cj.jdbc.Driver
+    type: com.zaxxer.hikari.HikariDataSource
+    hikari:
+      minimum-idle: 5
+      maximum-pool-size: 10
+      auto-commit: true
+      idle-timeout: 30000
+      pool-name: EvaluationHikariCP
+      max-lifetime: 1800000
+      connection-timeout: 30000
+      connection-test-query: SELECT 1
+eureka:
+  instance:
+    prefer-ip-address: true
+  client:
+    register-with-eureka: true
+    fetch-registry: true

+ 74 - 0
search/search-service/src/main/resources/logback-spring.xml

@@ -0,0 +1,74 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<configuration>
+    <appender name="consoleLog" class="ch.qos.logback.core.ConsoleAppender">
+        <layout class="ch.qos.logback.classic.PatternLayout">
+            <pattern>
+                %d{HH:mm:ss.SSS} [%thread] %-5level %c %M %L - %msg%n
+            </pattern>
+        </layout>
+    </appender>
+
+    <!-- info 日志文件 -->
+    <appender name="fileInfoLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <filter class="ch.qos.logback.classic.filter.LevelFilter">
+            <level>ERROR</level>
+            <onMatch>DENY</onMatch>
+            <onMismatch>ACCEPT</onMismatch>
+        </filter>
+        <encoder>
+            <pattern>
+                %d{HH:mm:ss.SSS} %-5level %c %M %L - %msg%n
+            </pattern>
+            <charset>UTF-8</charset>
+        </encoder>
+        <!-- 滚动策略 -->
+        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+            <fileNamePattern>
+                <!-- renamed from content-info: this is the search module's config -->
+                logs/search-info.%d.log
+            </fileNamePattern>
+        </rollingPolicy>
+    </appender>
+
+    <!-- error 日志文件 -->
+    <appender name="fileErrorLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+            <level>ERROR</level>
+        </filter>
+        <encoder>
+            <pattern>
+                %d{HH:mm:ss.SSS} %-5level %c %M %L - %msg%n
+            </pattern>
+            <charset>UTF-8</charset>
+        </encoder>
+        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+            <fileNamePattern>
+                <!-- renamed from content-error: this is the search module's config -->
+                logs/search-error.%d.log
+            </fileNamePattern>
+        </rollingPolicy>
+    </appender>
+
+    <springProfile name="dev">
+        <root level="info">
+            <appender-ref ref="consoleLog"></appender-ref>
+        </root>
+    </springProfile>
+    <springProfile name="test">
+        <root level="info">
+            <appender-ref ref="fileInfoLog"></appender-ref>
+            <appender-ref ref="fileErrorLog"></appender-ref>
+        </root>
+    </springProfile>
+    <springProfile name="prod">
+        <root level="info">
+            <appender-ref ref="fileInfoLog"></appender-ref>
+            <appender-ref ref="fileErrorLog"></appender-ref>
+        </root>
+    </springProfile>
+    <springProfile name="cluster">
+        <root level="info">
+            <appender-ref ref="fileInfoLog"></appender-ref>
+            <appender-ref ref="fileErrorLog"></appender-ref>
+        </root>
+    </springProfile>
+</configuration>

+ 270 - 0
search/search-service/src/test/java/SearchTest.java

@@ -0,0 +1,270 @@
+import ch.qos.logback.classic.Level;
+import ch.qos.logback.classic.Logger;
+import ch.qos.logback.classic.LoggerContext;
+import cn.reghao.jutil.tool.id.SnowFlake;
+import cn.reghao.tnb.search.api.dto.VideoSummary;
+import cn.reghao.tnb.search.app.SearchApplication;
+import cn.reghao.tnb.search.app.es.*;
+import cn.reghao.tnb.search.app.lucene.LuceneDocument;
+import cn.reghao.tnb.search.app.lucene.LuceneIndex;
+import cn.reghao.tnb.search.app.lucene.LuceneQuery;
+import cn.reghao.tnb.search.app.model.po.VideoText;
+import cn.reghao.tnb.search.app.model.po.Wenshu;
+import cn.reghao.tnb.search.app.util.ClassUtil;
+import co.elastic.clients.elasticsearch.ElasticsearchClient;
+import co.elastic.clients.elasticsearch._types.mapping.Property;
+import co.elastic.clients.elasticsearch.indices.AnalyzeRequest;
+import co.elastic.clients.elasticsearch.indices.AnalyzeResponse;
+import co.elastic.clients.elasticsearch.indices.analyze.AnalyzeToken;
+import lombok.extern.slf4j.Slf4j;
+import org.junit.jupiter.api.Test;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.test.context.ActiveProfiles;
+
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;
import java.util.*;
+
+/**
+ * @author reghao
+ * @date 2025-03-18 15:51:21
+ */
+@Slf4j
+@ActiveProfiles("dev")
+@SpringBootTest(classes = SearchApplication.class)
+public class SearchTest {
+    SnowFlake idGenerator = new SnowFlake(1, 1);
+    @Autowired
+    ElasticService elasticService;
+    @Autowired
+    IndexService indexService;
+    @Autowired
+    MappingService mappingService;
+    @Autowired
+    DocumentService documentService;
+    @Autowired
+    QueryService<Wenshu> queryService;
+    String index = Wenshu.class.getSimpleName().toLowerCase(Locale.ROOT);
+
+    void readByFileChannel(String filePath, DocumentService documentService) {
+        List<Wenshu> list = new ArrayList<>();
+        File file = new File(filePath);
+        try {
+            FileInputStream fis = new FileInputStream(file);
+            FileChannel fileChannel = fis.getChannel();
+
+            int total = 0;
+            // 10MB
+            int capacity = 10*1024*1024;
+            ByteBuffer byteBuffer = ByteBuffer.allocate(capacity);
+            StringBuffer buffer = new StringBuffer();
+            while(fileChannel.read(byteBuffer) != -1) {
+                //读取后,将位置置为0,将limit置为容量, 以备下次读入到字节缓冲中,从0开始存储
+                byteBuffer.clear();
+                byte[] bytes = byteBuffer.array();
+
+                String str = new String(bytes);
+                buffer.append(str);
+                String[] strArray = buffer.toString().split(System.lineSeparator());
+                for (int i = 0; i < strArray.length-1; i++) {
+                    String line = strArray[i];
+                    Wenshu wenshu = parseLine(line);
+                    if (wenshu != null) {
+                        list.add(wenshu);
+                    } else {
+                        log.error("error parse line: {}", ++total);
+                    }
+
+                    if (list.size() > 10_000) {
+                        documentService.batchAddDocument1(index, list);
+                        log.info("add {} documents to es", list.size());
+                        list.clear();
+                    }
+                }
+
+                String lastLine = strArray[strArray.length-1];
+                if (!lastLine.endsWith("}")) {
+                    buffer = new StringBuffer();
+                    buffer.append(strArray[strArray.length-1]);
+                }
+            }
+        } catch (IOException e) {
+            e.printStackTrace();
+        } finally {
+            // TODO close 处理
+        }
+    }
+
+    public void indexTest() throws IOException {
+        Map<String, Property> propertyMap = mappingService.getWenshuPropertyMap();
+        indexService.deleteIndex(index);
+        indexService.createIndex(index, propertyMap);
+    }
+
+    void setLogLevel() {
+        LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory();
+        Logger rootLogger = loggerContext.getLogger("ROOT");
+        rootLogger.setLevel(Level.INFO);
+    }
+
+    public void addDocTest() {
+        setLogLevel();
+        documentService.deleteAllDocument(index);
+        String filePath = "/home/reghao/Downloads/2021年01月裁判文书数据.csv";
+        readByFileChannel(filePath, documentService);
+    }
+
+    private Wenshu parseLine(String line) {
+        String[] arr = line.split(",");
+        try {
+            List<String> fields = new ArrayList<>();
+            String id = idGenerator.nextId()+"";
+            fields.add(id);
+
+            String originalUrl = arr[0];
+            String caseId = arr[1];
+            String caseName = arr[2];
+            String court = arr[3];
+            String region = arr[4];
+            String caseType = arr[5];
+            String caseTypeId = arr[6];
+            fields.addAll(Arrays.asList(arr).subList(0, 7));
+            String procedure = arr[8];
+            fields.add(procedure);
+            String judgmentDate = arr[9];
+            fields.add(judgmentDate);
+            String publicDate = arr[10];
+            fields.add(publicDate);
+            String parties = arr[11];
+            fields.add(parties);
+
+            String cause = "";
+            String legalBasis = "";
+            String fullText = "";
+            if (arr.length == 13) {
+                cause = arr[12];
+            } else if (arr.length == 14) {
+                cause = arr[12];
+                legalBasis = arr[13];
+            } else if (arr.length == 15) {
+                cause = arr[12];
+                legalBasis = arr[13];
+                fullText = arr[14];
+            } else if (arr.length > 15) {
+                cause = arr[12];
+                legalBasis = arr[13];
+                String str = line.split(legalBasis)[1];
+                fullText = str.substring(1);
+            }
+
+            fields.add(cause);
+            fields.add(legalBasis);
+            fields.add(fullText);
+
+            Object object = ClassUtil.getObject(Wenshu.class, fields.toArray(new String[0]));
+            if (object instanceof  Wenshu) {
+                return (Wenshu) object;
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        return null;
+    }
+
+    public void queryTest() {
+        setLogLevel();
+
+        int pn = 1;
+        int ps = 10;
+        String queryString = "拐卖";
+        /*List<Wenshu> page = queryService.queryWithHighlight(index, queryString, pn, ps, Wenshu.class);
+        while (!page.isEmpty()) {
+            pn++;
+            page = queryService.queryWithHighlight(index, queryString, pn, ps, Wenshu.class);
+        }*/
+
+        System.out.println();
+
+        Page<Wenshu> page = queryService.queryWithHighlight(index, queryString, pn, ps, Wenshu.class);
+        long total = page.getTotalElements();
+        int totalPages = page.getTotalPages();
+        while (pn <= totalPages) {
+            page = queryService.queryWithHighlight(index, queryString, pn, ps, Wenshu.class);
+            List<Wenshu> list = page.getContent();
+            Wenshu wenshu = list.get(0);
+            Wenshu wenshu1 = queryService.queryById(Wenshu.class, index, wenshu.getId());
+            pn++;
+        }
+    }
+
+    @Autowired
+    LuceneQuery luceneQuery;
+    @Autowired
+    LuceneIndex luceneIndex;
+    @Autowired
+    LuceneDocument luceneDocument;
+    @Autowired
+    QueryService<VideoSummary> esQuery;
+    @Autowired
+    SearchService searchService;
+    @Test
+    public void storeTest() throws Exception {
+        int pn = 1;
+        int ps = 12;
+        PageRequest pageRequest = PageRequest.of(pn-1, ps);
+        String videoId = "ao1n8ggYOg";
+        /*Document document = luceneIndex.getDocument(videoId);
+        VideoSummary videoSummary = (VideoSummary) luceneDocument.getObject(VideoSummary.class, document);
+
+        String title = "哈哈哈哈哈5哈哈哈哈4哈1";
+        videoSummary.setTitle(title);
+        videoSummary.setId("");
+        videoSummary.setDescription("");
+
+        Document document1 = luceneDocument.getVideoSummaryDoc(videoSummary);
+        luceneIndex.updateIndex(videoId, document1);
+
+        Document document2 = luceneIndex.getDocument(videoId);
+        Object object2 = luceneDocument.getObject(VideoSummary.class, document2);*/
+
+        String index = VideoText.class.getSimpleName().toLowerCase(Locale.ROOT);
+        String kw = "隔壁";
+        /*Page<VideoSummary> page = hibernateQuery.queryWithHighlight(kw, pn, ps);
+        Page<VideoSummary> page1 = luceneQuery.queryWithHighlight("", kw, pn, ps);*/
+        //Page<VideoSummary> page2 = esQuery.queryWithHighlight(index, kw, pn, ps, VideoSummary.class);
+        System.out.println();
+
+        Map<String, Property> propertyMap = mappingService.getVideoTextPropertyMap();
+//        indexService.deleteIndex(index);
+//        indexService.createIndex(index, propertyMap);
+        //indexService.getIndex(index);
+        indexService.deleteIndex(index);
+        //indexService.getIndex(index);
+        //indexService.updateMapping(index);
+        //searchService.searchAll(index);
+        //searchService.count(index);
+    }
+
+    @Test
+    public void analyzerTest() throws IOException {
+        String text = "中华人民共和国国歌";
+        String analyzer = "standard";
+        analyzer = "ik_max_word";
+        //analyzer = "ik_smart";
+        AnalyzeRequest analyzeRequest = new AnalyzeRequest.Builder()
+                .text(text)
+                .analyzer(analyzer)
+                .build();
+        ElasticsearchClient esClient = elasticService.getElasticsearchClient();
+        AnalyzeResponse analyzeResponse = esClient.indices().analyze(analyzeRequest);
+        List<AnalyzeToken> tokens = analyzeResponse.tokens();
+    }
+}

+ 1 - 0
zzz/build_jar.sh

@@ -42,6 +42,7 @@ exec_jar() {
   nohup java -jar ${proj_dir}/file/file-service/target/tnb-file.jar > tnb-file.log 2>&1 &
   nohup java -jar ${proj_dir}/file/file-service/target/tnb-file.jar > tnb-file.log 2>&1 &
   nohup java -jar ${proj_dir}/content/content-service/target/tnb-content.jar > tnb-content.log 2>&1 &
   nohup java -jar ${proj_dir}/content/content-service/target/tnb-content.jar > tnb-content.log 2>&1 &
   nohup java -jar ${proj_dir}/data/data-service/target/tnb-data.jar > tnb-data.log 2>&1 &
   nohup java -jar ${proj_dir}/data/data-service/target/tnb-data.jar > tnb-data.log 2>&1 &
+  nohup java -jar ${proj_dir}/search/search-service/target/tnb-search.jar > tnb-search.log 2>&1 &
 }
 }
 
 
 ###############################################################################
 ###############################################################################

+ 2 - 0
zzz/install_tnb.sh

@@ -18,6 +18,7 @@ install_dependencies() {
  mvn clean install -am -Dmaven.test.skip -pl content/content-api/
  mvn clean install -am -Dmaven.test.skip -pl content/content-api/
  mvn clean install -am -Dmaven.test.skip -pl file/file-api/
  mvn clean install -am -Dmaven.test.skip -pl file/file-api/
  mvn clean install -am -Dmaven.test.skip -pl data/data-api/
  mvn clean install -am -Dmaven.test.skip -pl data/data-api/
+ mvn clean install -am -Dmaven.test.skip -pl search/search-api/
 
 
  #cd ~/code/java/tnb/tinyurl/
  #cd ~/code/java/tnb/tinyurl/
  #mvn clean install -am -Dmaven.test.skip -pl tinyurl-api/
  #mvn clean install -am -Dmaven.test.skip -pl tinyurl-api/
@@ -33,6 +34,7 @@ deploy_dependencies() {
   mvn clean deploy -am -Dmaven.test.skip -pl content/content-api/
   mvn clean deploy -am -Dmaven.test.skip -pl content/content-api/
   mvn clean deploy -am -Dmaven.test.skip -pl file/file-api/
   mvn clean deploy -am -Dmaven.test.skip -pl file/file-api/
   mvn clean deploy -am -Dmaven.test.skip -pl data/data-api/
   mvn clean deploy -am -Dmaven.test.skip -pl data/data-api/
+  mvn clean deploy -am -Dmaven.test.skip -pl search/search-api/
 
 
   #cd ~/code/java/tnb/tinyurl/
   #cd ~/code/java/tnb/tinyurl/
   #mvn clean deploy -am -Dmaven.test.skip -pl tinyurl-api/
   #mvn clean deploy -am -Dmaven.test.skip -pl tinyurl-api/