add:新增分词搜索

This commit is contained in:
不考上研不改网名 2025-11-07 09:32:33 +08:00
parent 6fba583b62
commit 405e470b8d
38 changed files with 1901 additions and 590 deletions

4
.vscode/settings.json vendored Normal file
View File

@ -0,0 +1,4 @@
{
"java.configuration.updateBuildConfiguration": "automatic",
"java.compile.nullAnalysis.mode": "automatic"
}

View File

@ -78,6 +78,12 @@
<artifactId>poi-ooxml</artifactId> <artifactId>poi-ooxml</artifactId>
<version>5.2.3</version> <version>5.2.3</version>
</dependency> </dependency>
<!-- HanLP自然语言处理 -->
<dependency>
<groupId>com.hankcs</groupId>
<artifactId>hanlp</artifactId>
<version>portable-1.8.3</version>
</dependency>
</dependencies> </dependencies>

View File

@ -0,0 +1,140 @@
package com.warmer.base.service;
import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.seg.common.Term;
import com.hankcs.hanlp.tokenizer.StandardTokenizer;
import org.springframework.stereotype.Service;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Keyword extraction service: segments fault-description text with HanLP,
 * filters by stop words and part of speech, and returns the most frequent
 * terms ordered by importance.
 *
 * @author tanc
 */
@Service
public class KeywordExtractionService {

    /** Default maximum number of keywords returned by the public overloads. */
    private static final int DEFAULT_KEYWORD_LIMIT = 10;

    // Stop-word set. NOTE(review): most entries render as empty strings here —
    // the original Chinese stop words appear to have been lost in an encoding
    // conversion; restore them from the source of truth before relying on this
    // filter. The duplicate "" entries collapse harmlessly inside the HashSet.
    private static final Set<String> STOP_WORDS = new HashSet<>(Arrays.asList(
            "", "", "", "", "", "", "", "", "", "", "", "", "一个", "", "", "", "", "", "", "", "", "", "", "没有", "", "", "自己", ""
    ));

    // Parts of speech considered relevant for fault descriptions
    // (noun variants, verbs, gerunds, adjectives, nominal adjectives).
    private static final Set<String> IMPORTANT_POS = new HashSet<>(Arrays.asList(
            "n", "nr", "ns", "nt", "nz", "nl", "ng", "v", "vn", "a", "an"
    ));

    // Register industry terms in HanLP's custom dictionary so they segment as
    // stable single tokens, and read HanLP resources from the classpath.
    // (Moved ahead of the methods so the extractKeywords Javadoc sits directly
    // on the method it documents.)
    static {
        try {
            // NOTE(review): assumes ResourceIOAdapter resolves HanLP data from
            // the jar classpath — confirm against the bundled HanLP version.
            com.hankcs.hanlp.HanLP.Config.IOAdapter = new com.hankcs.hanlp.corpus.io.ResourceIOAdapter();
            com.hankcs.hanlp.dictionary.CustomDictionary.add("冷轧", "nz 1000");
            com.hankcs.hanlp.dictionary.CustomDictionary.add("热轧", "nz 1000");
            com.hankcs.hanlp.dictionary.CustomDictionary.add("轴承座", "n 1000");
            com.hankcs.hanlp.dictionary.CustomDictionary.add("轴承", "n 1000");
        } catch (Exception ignored) {
            // Best effort: a dictionary customization failure must not prevent class loading.
        }
    }

    /**
     * Extract keywords from a fault description.
     *
     * @param text fault description text
     * @return keyword list ordered by descending frequency, at most 10 entries
     */
    public List<String> extractKeywords(String text) {
        return extractKeywords(text, DEFAULT_KEYWORD_LIMIT);
    }

    /**
     * Extract up to {@code limit} keywords from a fault description.
     * Backward-compatible generalization of the single-argument overload.
     *
     * @param text  fault description text
     * @param limit maximum number of keywords to return (non-positive yields an empty list)
     * @return keyword list ordered by descending frequency
     */
    public List<String> extractKeywords(String text, int limit) {
        if (text == null || text.trim().isEmpty() || limit <= 0) {
            return new ArrayList<>();
        }
        // Segment with part-of-speech tagging, then count candidate words.
        Map<String, Integer> frequency = new HashMap<>();
        for (Term term : StandardTokenizer.segment(text)) {
            String word = term.word.trim();
            if (isCandidate(word, term.nature.toString())) {
                frequency.merge(word, 1, Integer::sum);
            }
        }
        return frequency.entrySet().stream()
                .sorted(Map.Entry.<String, Integer>comparingByValue().reversed())
                .map(Map.Entry::getKey)
                .limit(limit)
                .collect(Collectors.toList());
    }

    /**
     * Extract keywords with detail (word, part of speech, occurrence count).
     *
     * @param text fault description text
     * @return at most 10 keyword records ordered by descending count
     */
    public List<KeywordInfo> extractKeywordsWithInfo(String text) {
        if (text == null || text.trim().isEmpty()) {
            return new ArrayList<>();
        }
        Map<String, KeywordInfo> infoByWord = new HashMap<>();
        for (Term term : StandardTokenizer.segment(text)) {
            String word = term.word.trim();
            String nature = term.nature.toString();
            if (isCandidate(word, nature)) {
                KeywordInfo existing = infoByWord.get(word);
                if (existing == null) {
                    // Bug fix: the previous version called incrementCount() on a
                    // freshly created KeywordInfo too (whose count already starts
                    // at 1), so every reported count was off by one.
                    infoByWord.put(word, new KeywordInfo(word, nature));
                } else {
                    existing.incrementCount();
                }
            }
        }
        return infoByWord.values().stream()
                .sorted((a, b) -> Integer.compare(b.getCount(), a.getCount()))
                .limit(DEFAULT_KEYWORD_LIMIT)
                .collect(Collectors.toList());
    }

    /**
     * Shared candidate filter: longer than one character, not a stop word,
     * and tagged with an important part of speech.
     */
    private static boolean isCandidate(String word, String nature) {
        return word.length() > 1
                && !STOP_WORDS.contains(word)
                && IMPORTANT_POS.contains(nature);
    }

    /**
     * Keyword detail record: the word, its part of speech, and how many times
     * it occurred in the analyzed text.
     */
    public static class KeywordInfo {
        private String keyword;
        private String partOfSpeech;
        private int count;

        public KeywordInfo(String keyword, String partOfSpeech) {
            this.keyword = keyword;
            this.partOfSpeech = partOfSpeech;
            this.count = 1;
        }

        /** Increase the occurrence count by one. */
        public void incrementCount() {
            this.count++;
        }

        // Getters and setters
        public String getKeyword() { return keyword; }
        public void setKeyword(String keyword) { this.keyword = keyword; }
        public String getPartOfSpeech() { return partOfSpeech; }
        public void setPartOfSpeech(String partOfSpeech) { this.partOfSpeech = partOfSpeech; }
        public int getCount() { return count; }
        public void setCount(int count) { this.count = count; }
    }
}

View File

@ -57,7 +57,11 @@
<artifactId>neo4j-java-driver</artifactId> <artifactId>neo4j-java-driver</artifactId>
<version>4.4.12</version> <version>4.4.12</version>
</dependency> </dependency>
<dependency>
<groupId>com.hankcs</groupId>
<artifactId>hanlp</artifactId>
<version>portable-1.8.3</version>
</dependency>
</dependencies> </dependencies>
<build> <build>
<plugins> <plugins>

View File

@ -44,6 +44,7 @@ public class Application implements ApplicationRunner {
domainModel.setModifyTime(DateUtil.getDateNow()); domainModel.setModifyTime(DateUtil.getDateNow());
domainModel.setModifyUser("sa"); domainModel.setModifyUser("sa");
domainModel.setNodeCount(value); domainModel.setNodeCount(value);
domainModel.setStatus(1);
kgManagerService.updateDomain(domainModel); kgManagerService.updateDomain(domainModel);
}else { }else {
domainModel=KgDomain.builder() domainModel=KgDomain.builder()

View File

@ -0,0 +1,41 @@
package com.warmer.web.controller;
import com.warmer.base.util.R;
import com.warmer.web.model.CustomDictEntry;
import com.warmer.web.service.CustomDictService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.*;
@RestController
@RequestMapping("/customDict")
@CrossOrigin(origins = "*", maxAge = 3600)
public class CustomDictController {
@Autowired
private CustomDictService customDictService;
@GetMapping("/list")
public R<List<CustomDictEntry>> list(@RequestParam(required = false) String domain,
@RequestParam(required = false) Integer enabled) {
return R.success(customDictService.list(domain, enabled));
}
@PostMapping("/add")
public R<Boolean> add(@RequestBody Map<String, Object> body) {
String word = Objects.toString(body.get("word"), "");
String nature = Objects.toString(body.get("nature"), "n");
Integer freq = body.get("freq") == null ? 1000 : Integer.valueOf(body.get("freq").toString());
String domain = Objects.toString(body.get("domain"), null);
String source = Objects.toString(body.get("source"), "manual");
if (word.isEmpty()) return R.error("word不能为空");
boolean ok = customDictService.addWord(word, nature, freq, domain, source);
return ok ? R.success(true) : R.error("添加失败");
}
@PostMapping("/disable")
public R<Boolean> disable(@RequestParam Integer id) {
return customDictService.disable(id) ? R.success(true) : R.error("停用失败");
}
}

View File

@ -0,0 +1,62 @@
package com.warmer.web.controller;
import com.warmer.base.util.R;
import com.warmer.base.util.GraphPageRecord;
import com.warmer.web.service.KGGraphService;
import com.warmer.web.request.GraphQuery;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.HashMap;
/**
 * Domain (graph label) management endpoints: paging, deletion, rename.
 */
@RestController
@RequestMapping("/kg/domain")
public class GraphDomainController extends BaseController {

    @Autowired
    private KGGraphService kgGraphService;

    /**
     * Paged domain listing with optional keyword filtering.
     * Request body supports GraphQuery.keywords / pageIndex / pageSize.
     */
    @PostMapping("/page")
    @ResponseBody
    public R<GraphPageRecord<HashMap<String, Object>>> page(@RequestBody GraphQuery query) {
        return R.success(kgGraphService.getPageDomain(query), "操作成功");
    }

    /**
     * Delete the given domain label.
     */
    @DeleteMapping("/{domain}")
    @ResponseBody
    public R<?> delete(@PathVariable("domain") String domain) {
        if (StringUtils.isBlank(domain)) {
            return R.error("领域名称不能为空");
        }
        kgGraphService.deleteKGDomain(domain);
        return R.success(null, "删除成功");
    }

    /**
     * Rename a domain label.
     * Body: { "oldName": "...", "newName": "..." }
     */
    @PutMapping("/rename")
    @ResponseBody
    public R<?> rename(@RequestBody HashMap<String, String> body) {
        String from = body.get("oldName");
        String to = body.get("newName");
        if (StringUtils.isBlank(from) || StringUtils.isBlank(to)) {
            return R.error("oldName/newName 不能为空");
        }
        if (StringUtils.equals(from, to)) {
            return R.error("新名称不能与旧名称相同");
        }
        kgGraphService.renameKGDomain(from, to);
        return R.success(null, "重命名成功");
    }
}

View File

@ -0,0 +1,146 @@
package com.warmer.web.controller;
import com.warmer.base.util.R;
import com.warmer.web.request.IntelligentSearchRequest;
import com.warmer.web.service.IntelligentSearchService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.*;
import java.util.Map;
import java.util.List;
/**
 * Intelligent search controller: text-driven graph search, keyword
 * extraction and search suggestions.
 *
 * @author tanc
 */
@RestController
@RequestMapping("/intelligent")
@CrossOrigin(origins = "*", maxAge = 3600)
public class IntelligentSearchController {

    @Autowired
    private IntelligentSearchService intelligentSearchService;

    /**
     * Intelligent search endpoint.
     *
     * @param request search request payload
     * @return search result
     */
    @PostMapping("/search")
    public R<Map<String, Object>> search(@RequestBody IntelligentSearchRequest request) {
        try {
            if (StringUtils.isEmpty(request.getText())) {
                return R.error("搜索文本不能为空");
            }
            if (StringUtils.isEmpty(request.getDomain())) {
                request.setDomain("default");
            }
            return R.success(intelligentSearchService.intelligentSearch(request.getText(), request.getDomain()));
        } catch (Exception e) {
            return R.error("智能搜索失败: " + e.getMessage());
        }
    }

    /**
     * Text-only search; always uses the default domain.
     *
     * @param text search text
     * @return search result
     */
    @PostMapping("/searchByText")
    public R<Map<String, Object>> searchByText(@RequestParam String text) {
        try {
            if (StringUtils.isEmpty(text)) {
                return R.error("搜索文本不能为空");
            }
            return R.success(intelligentSearchService.intelligentSearch(text, "default"));
        } catch (Exception e) {
            return R.error("智能搜索失败: " + e.getMessage());
        }
    }

    /**
     * GET variant, convenient for manual testing.
     *
     * @param text   search text
     * @param domain domain name, defaults to "default"
     * @return search result
     */
    @GetMapping("/search")
    public R<Map<String, Object>> searchGet(@RequestParam String text,
                                            @RequestParam(required = false, defaultValue = "default") String domain) {
        try {
            if (StringUtils.isEmpty(text)) {
                return R.error("搜索文本不能为空");
            }
            return R.success(intelligentSearchService.intelligentSearch(text, domain));
        } catch (Exception e) {
            return R.error("智能搜索失败: " + e.getMessage());
        }
    }

    /**
     * Keyword extraction endpoint.
     *
     * @param requestBody body containing the raw text under key "text"
     * @return extracted keyword records
     */
    @PostMapping("/extractKeywords")
    public R<?> extractKeywords(@RequestBody Map<String, String> requestBody) {
        try {
            String text = requestBody.get("text");
            if (StringUtils.isEmpty(text)) {
                return R.error("输入文本不能为空");
            }
            return R.success(intelligentSearchService.getKeywordExtractionResult(text));
        } catch (Exception e) {
            return R.error("关键词提取失败: " + e.getMessage());
        }
    }

    /**
     * Search suggestions (typeahead).
     *
     * @param query  partial query text
     * @param domain optional domain name; blank means "default"
     * @return node-name suggestions
     */
    @GetMapping("/suggestions")
    public R<List<String>> getSuggestions(@RequestParam String query,
                                          @RequestParam(required = false) String domain) {
        try {
            if (query == null || query.trim().isEmpty()) {
                return R.success(new java.util.ArrayList<>());
            }
            String effectiveDomain = (domain == null || domain.trim().isEmpty()) ? "default" : domain;
            return R.success(intelligentSearchService.getSearchSuggestions(query, effectiveDomain));
        } catch (Exception e) {
            return R.error("获取搜索建议失败: " + e.getMessage());
        }
    }
}

View File

@ -16,6 +16,7 @@ import com.warmer.web.entity.KgNodeDetailFile;
import com.warmer.web.model.NodeItem; import com.warmer.web.model.NodeItem;
import com.warmer.web.request.*; import com.warmer.web.request.*;
import com.warmer.web.service.FeedBackService; import com.warmer.web.service.FeedBackService;
import com.warmer.web.service.IntelligentSearchService;
import com.warmer.web.service.KGGraphService; import com.warmer.web.service.KGGraphService;
import com.warmer.web.service.KGManagerService; import com.warmer.web.service.KGManagerService;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
@ -40,6 +41,8 @@ public class KGBuilderController extends BaseController {
private KGManagerService kgManagerService; private KGManagerService kgManagerService;
@Autowired @Autowired
FeedBackService feedBackService; FeedBackService feedBackService;
@Autowired
private IntelligentSearchService intelligentSearchService;
/** /**
* 获取图谱标签列表存放mysql表 * 获取图谱标签列表存放mysql表
@ -75,7 +78,19 @@ public class KGBuilderController extends BaseController {
@PostMapping(value = "/queryGraphResult") @PostMapping(value = "/queryGraphResult")
public R<HashMap<String, Object>> queryGraphResult(@RequestBody GraphQuery query) { public R<HashMap<String, Object>> queryGraphResult(@RequestBody GraphQuery query) {
try { try {
// 如果启用了智能搜索先处理关键字提取
if (query.getEnableKeywordExtraction() != null && query.getEnableKeywordExtraction()) {
query = intelligentSearchService.processIntelligentSearch(query);
}
HashMap<String, Object> graphData = kgGraphService.queryGraphResult(query); HashMap<String, Object> graphData = kgGraphService.queryGraphResult(query);
// 如果是智能搜索添加关键字信息到返回结果
if (query.getEnableKeywordExtraction() != null && query.getEnableKeywordExtraction() &&
query.getKeywords() != null) {
graphData.put("extractedKeywords", query.getKeywords());
}
return R.success(graphData); return R.success(graphData);
} catch (Exception e) { } catch (Exception e) {
e.printStackTrace(); e.printStackTrace();

View File

@ -0,0 +1,15 @@
package com.warmer.web.dao;
import com.warmer.web.model.CustomDictEntry;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
@Mapper
public interface CustomDictDao {
    /**
     * List dictionary entries.
     *
     * @param domain  domain filter; null means all domains
     * @param enabled enabled filter; null means both enabled and disabled
     * @return matching entries
     */
    List<CustomDictEntry> list(@Param("domain") String domain, @Param("enabled") Integer enabled);
    /** Insert a new entry; returns the affected row count. */
    int insert(CustomDictEntry entry);
    /** Mark the entry disabled (soft delete); returns the affected row count. */
    int disable(@Param("id") Integer id);
    /** Look up an entry by word within a domain; null when absent. */
    CustomDictEntry getByWord(@Param("word") String word, @Param("domain") String domain);
}

View File

@ -1,9 +1,8 @@
package com.warmer.web.dao; package com.warmer.web.dao;
import com.warmer.base.util.GraphPageRecord;
import com.warmer.web.model.NodeItem; import com.warmer.web.model.NodeItem;
import com.warmer.web.request.GraphQuery; import com.warmer.web.request.GraphQuery;
import com.warmer.base.util.GraphPageRecord;
import com.warmer.web.request.NodeCoordinateItem; import com.warmer.web.request.NodeCoordinateItem;
import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Mapper;
@ -11,232 +10,233 @@ import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@Mapper @Mapper
public interface KGGraphDao { public interface KGGraphDao {
/** /**
* 领域标签分页 * 领域标签分页
* @param queryItem * @param queryItem
* @return * @return
*/ */
GraphPageRecord<HashMap<String, Object>> getPageDomain(GraphQuery queryItem); GraphPageRecord<HashMap<String, Object>> getPageDomain(GraphQuery queryItem);
/** /**
* 删除Neo4j 标签 * 删除Neo4j 标签
* *
* @param domain * @param domain
*/ */
void deleteKgDomain(String domain); void deleteKgDomain(String domain);
/** /**
* 查询图谱节点和关系 * 查询图谱节点和关系
* *
* @param query * @param query
* @return node relationship * @return node relationship
*/ */
HashMap<String, Object> queryGraphResult(GraphQuery query); HashMap<String, Object> queryGraphResult(GraphQuery query);
/** /**
* 获取节点列表 * 获取节点列表
* *
* @param domain * @param domain
* @param pageIndex * @param pageIndex
* @param pageSize * @param pageSize
* @return * @return
*/ */
HashMap<String, Object> getDomainNodes(String domain, Integer pageIndex, Integer pageSize); HashMap<String, Object> getDomainNodes(String domain, Integer pageIndex, Integer pageSize);
/** /**
* 获取某个领域指定节点拥有的上下级的节点数 * 获取某个领域指定节点拥有的上下级的节点数
* *
* @param domain * @param domain
* @param nodeId * @param nodeId
* @return long 数值 * @return long 数值
*/ */
long getRelationNodeCount(String domain, long nodeId); long getRelationNodeCount(String domain, long nodeId);
/** /**
* 创建领域,默认创建一个新的节点,给节点附上默认属性 * 创建领域,默认创建一个新的节点,给节点附上默认属性
* *
* @param domain * @param domain
*/ */
void createDomain(String domain); void createDomain(String domain);
void quickCreateDomain(String domain,String nodeName); void quickCreateDomain(String domain,String nodeName);
/** /**
* 获取/展开更多节点,找到和该节点有关系的节点 * 获取/展开更多节点,找到和该节点有关系的节点
* *
* @param domain * @param domain
* @param nodeId * @param nodeId
* @return * @return
*/ */
HashMap<String, Object> getMoreRelationNode(String domain, String nodeId); HashMap<String, Object> getMoreRelationNode(String domain, String nodeId);
/** /**
* 更新节点名称 * 更新节点名称
* *
* @param domain * @param domain
* @param nodeId * @param nodeId
* @param nodeName * @param nodeName
* @return 修改后的节点 * @return 修改后的节点
*/ */
HashMap<String, Object> updateNodeName(String domain, String nodeId, String nodeName); HashMap<String, Object> updateNodeName(String domain, String nodeId, String nodeName);
/** /**
* 创建单个节点 * 创建单个节点
* *
* @param domain * @param domain
* @param entity * @param entity
* @return * @return
*/ */
HashMap<String, Object> createNode(String domain, NodeItem entity); HashMap<String, Object> createNode(String domain, NodeItem entity);
HashMap<String, Object> createNodeWithUUid(String domain, NodeItem entity); HashMap<String, Object> createNodeWithUUid(String domain, NodeItem entity);
/** /**
* 批量创建节点和关系 * 批量创建节点和关系
* *
* @param domain * @param domain
* 领域 * 领域
* @param sourceName * @param sourceName
* 源节点 * 源节点
* @param relation * @param relation
* 关系 * 关系
* @param targetNames * @param targetNames
* 目标节点数组 * 目标节点数组
* @return * @return
*/ */
HashMap<String, Object> batchCreateNode(String domain, String sourceName, String relation, String[] targetNames); HashMap<String, Object> batchCreateNode(String domain, String sourceName, String relation, String[] targetNames);
/** /**
* 批量创建下级节点 * 批量创建下级节点
* *
* @param domain * @param domain
* 领域 * 领域
* @param sourceId * @param sourceId
* 源节点id * 源节点id
* @param entityType * @param entityType
* 节点类型 * 节点类型
* @param targetNames * @param targetNames
* 目标节点名称数组 * 目标节点名称数组
* @param relation * @param relation
* 关系 * 关系
* @return * @return
*/ */
HashMap<String, Object> batchCreateChildNode(String domain, String sourceId, Integer entityType, HashMap<String, Object> batchCreateChildNode(String domain, String sourceId, Integer entityType,
String[] targetNames, String relation); String[] targetNames, String relation);
/** /**
* 批量创建同级节点 * 批量创建同级节点
* *
* @param domain * @param domain
* 领域 * 领域
* @param entityType * @param entityType
* 节点类型 * 节点类型
* @param sourceNames * @param sourceNames
* 节点名称 * 节点名称
* @return * @return
*/ */
List<HashMap<String, Object>> batchCreateSameNode(String domain, Integer entityType, String[] sourceNames); List<HashMap<String, Object>> batchCreateSameNode(String domain, Integer entityType, String[] sourceNames);
/** /**
* 添加关系 * 添加关系
* *
* @param domain * @param domain
* 领域 * 领域
* @param sourceId * @param sourceId
* 源节点id * 源节点id
* @param targetId * @param targetId
* 目标节点id * 目标节点id
* @param ship * @param ship
* 关系 * 关系
* @return * @return
*/ */
HashMap<String, Object> createLink(String domain, long sourceId, long targetId, String ship); HashMap<String, Object> createLink(String domain, long sourceId, long targetId, String ship);
HashMap<String, Object> createLinkByUuid(String domain, long sourceId, long targetId, String ship); HashMap<String, Object> createLinkByUuid(String domain, long sourceId, long targetId, String ship);
/** /**
* 更新关系 * 更新关系
* *
* @param domain * @param domain
* 领域 * 领域
* @param shipId * @param shipId
* 关系id * 关系id
* @param shipName * @param shipName
* 关系名称 * 关系名称
* @return * @return
*/ */
HashMap<String, Object> updateLink(String domain, long shipId, String shipName); HashMap<String, Object> updateLink(String domain, long shipId, String shipName);
/** /**
* 删除节点(先删除关系再删除节点) * 删除节点(先删除关系再删除节点)
* *
* @param domain * @param domain
* @param nodeId * @param nodeId
* @return * @return
*/ */
List<HashMap<String, Object>> deleteNode(String domain, long nodeId); List<HashMap<String, Object>> deleteNode(String domain, long nodeId);
/** /**
* 删除关系 * 删除关系
* *
* @param domain * @param domain
* @param shipId * @param shipId
*/ */
void deleteLink(String domain, long shipId); void deleteLink(String domain, long shipId);
/** /**
* 段落识别出的三元组生成图谱 * 段落识别出的三元组生成图谱
* *
* @param domain * @param domain
* @param entityType * @param entityType
* @param operateType * @param operateType
* @param sourceId * @param sourceId
* @param rss * @param rss
* 关系三元组 * 关系三元组
* [[startname;ship;endname],[startname1;ship1;endname1],[startname2;ship2;endname2]] * [[startname;ship;endname],[startname1;ship1;endname1],[startname2;ship2;endname2]]
* @return node relationship * @return node relationship
*/ */
HashMap<String, Object> createGraphByText(String domain, Integer entityType, Integer operateType, Integer sourceId, HashMap<String, Object> createGraphByText(String domain, Integer entityType, Integer operateType, Integer sourceId,
String[] rss); String[] rss);
/** /**
* 批量创建节点关系 * 批量创建节点关系
* @param domain * @param domain
* @param params 三元组 sourceNode,relationship,targetNode * @param params 三元组 sourceNode,relationship,targetNode
*/ */
void batchCreateGraph(String domain, List<Map<String,Object>> params); void batchCreateGraph(String domain, List<Map<String,Object>> params);
/** /**
* 批量更新节点坐标 * 批量更新节点坐标
* @param domain * @param domain
* @param params * @param params
*/ */
void batchUpdateGraphNodesCoordinate(String domain,List<NodeCoordinateItem> params); void batchUpdateGraphNodesCoordinate(String domain,List<NodeCoordinateItem> params);
/** /**
* 更新节点有无附件 * 更新节点有无附件
* @param domain * @param domain
* @param nodeId * @param nodeId
* @param status * @param status
*/ */
void updateNodeFileStatus(String domain,long nodeId, int status); void updateNodeFileStatus(String domain,long nodeId, int status);
/** /**
* 更新图谱节点的图片 * 更新图谱节点的图片
* @param domain * @param domain
* @param nodeId * @param nodeId
* @param img * @param img
*/ */
void updateNodeImg(String domain, long nodeId, String img); void updateNodeImg(String domain, long nodeId, String img);
/** /**
* 移除节点图片 * 移除节点图片
* @param domain * @param domain
* @param nodeId * @param nodeId
*/ */
void removeNodeImg(String domain, long nodeId); void removeNodeImg(String domain, long nodeId);
/** /**
* 导入csv * 导入csv
* @param domain * @param domain
* @param csvUrl * @param csvUrl
* @param status * @param status
*/ */
void batchInsertByCsv(String domain, String csvUrl, int status) ; void batchInsertByCsv(String domain, String csvUrl, int status) ;
void updateCoordinateOfNode(String domain, String uuid, Double fx, Double fy); void updateCoordinateOfNode(String domain, String uuid, Double fx, Double fy);
// 新增重命名领域标签
void renameKgDomain(String oldDomain, String newDomain);
} }

View File

@ -19,32 +19,50 @@ import java.util.Map;
@Repository @Repository
public class KGGraphRepository implements KGGraphDao { public class KGGraphRepository implements KGGraphDao {
/** /**
* 领域标签分页 * 领域标签分页支持按关键字过滤
*/ */
@Override @Override
public GraphPageRecord<HashMap<String, Object>> getPageDomain(GraphQuery queryItem) { public GraphPageRecord<HashMap<String, Object>> getPageDomain(GraphQuery queryItem) {
GraphPageRecord<HashMap<String, Object>> resultRecord = new GraphPageRecord<HashMap<String, Object>>(); GraphPageRecord<HashMap<String, Object>> resultRecord = new GraphPageRecord<HashMap<String, Object>>();
try { try {
String totalCountQuery = "MATCH (n) RETURN count(distinct labels(n)) as count"; String[] keywords = queryItem.getKeywords();
long totalCount = 0; String whereClause = "";
totalCount = Neo4jUtil.getGraphValue(totalCountQuery); if (keywords != null && keywords.length > 0) {
if (totalCount > 0) { List<String> conds = new ArrayList<>();
int skipCount = (queryItem.getPageIndex() - 1) * queryItem.getPageSize(); for (String kw : keywords) {
int limitCount = queryItem.getPageSize(); if (!StringUtil.isBlank(kw)) {
String domainSql = String.format( conds.add(String.format("domain CONTAINS '%s'", kw));
"START n=node(*) RETURN distinct labels(n) as domain,count(n) as nodeCount order by nodeCount desc SKIP %s LIMIT %s", }
skipCount, limitCount); }
List<HashMap<String, Object>> pageList = Neo4jUtil.getGraphNode(domainSql); if (conds.size() > 0) {
resultRecord.setPageIndex(queryItem.getPageIndex()); whereClause = "WHERE " + String.join(" OR ", conds);
resultRecord.setPageSize(queryItem.getPageSize()); }
resultRecord.setTotalCount(totalCount);
resultRecord.setNodeList(pageList);
} }
// total count of distinct labels, with optional filter
String totalCountQuery = String.format(
"MATCH (n) UNWIND labels(n) AS label WITH DISTINCT label AS domain %s RETURN count(domain) AS count",
whereClause
);
long totalCount = Neo4jUtil.getGraphValue(totalCountQuery);
int skipCount = (queryItem.getPageIndex() - 1) * queryItem.getPageSize();
int limitCount = queryItem.getPageSize();
// page query: domain label + node count, ordered desc
String domainSql = String.format(
"MATCH (n) UNWIND labels(n) AS label WITH label AS domain, n %s " +
"RETURN domain, count(n) AS nodeCount ORDER BY nodeCount DESC SKIP %s LIMIT %s",
whereClause, skipCount, limitCount
);
List<HashMap<String, Object>> pageList = Neo4jUtil.getGraphNode(domainSql);
resultRecord.setPageIndex(queryItem.getPageIndex());
resultRecord.setPageSize(queryItem.getPageSize());
resultRecord.setTotalCount(totalCount);
resultRecord.setNodeList(pageList);
} catch (Exception e) { } catch (Exception e) {
e.printStackTrace(); e.printStackTrace();
} }
return resultRecord; return resultRecord;
} }
@ -60,7 +78,6 @@ public class KGGraphRepository implements KGGraphDao {
Neo4jUtil.runCypherSql(deleteNode); Neo4jUtil.runCypherSql(deleteNode);
} catch (Exception e) { } catch (Exception e) {
e.printStackTrace(); e.printStackTrace();
} }
} }
@ -723,4 +740,15 @@ public class KGGraphRepository implements KGGraphDao {
} }
Neo4jUtil.runCypherSql(cypher); Neo4jUtil.runCypherSql(cypher);
} }
@Override
public void renameKgDomain(String oldDomain, String newDomain) {
    // Labels are interpolated into Cypher inside backtick-quoted identifiers;
    // escape any embedded backtick (doubled per Cypher naming rules) so a
    // hostile label name cannot break out of the identifier (injection guard).
    String oldLabel = oldDomain.replace("`", "``");
    String newLabel = newDomain.replace("`", "``");
    try {
        // Add the new label first, then drop the old one, so nodes are never
        // left label-less if the second statement fails.
        String addLabel = String.format("MATCH (n:`%s`) SET n:`%s`", oldLabel, newLabel);
        Neo4jUtil.runCypherSql(addLabel);
        String removeLabel = String.format("MATCH (n:`%s`) REMOVE n:`%s`", oldLabel, oldLabel);
        Neo4jUtil.runCypherSql(removeLabel);
    } catch (Exception e) {
        // Preserve the class's best-effort convention: log and continue.
        e.printStackTrace();
    }
}
} }

View File

@ -0,0 +1,84 @@
package com.warmer.web.model;
/**
 * Row model for the custom-dictionary table: one user-managed HanLP
 * dictionary entry (word, part of speech, frequency, owning domain,
 * enabled flag, provenance and timestamps).
 */
public class CustomDictEntry {
    private Integer id;                     // primary key
    private String word;                    // the dictionary word itself
    private String nature;                  // HanLP part-of-speech tag, e.g. "n"
    private Integer freq;                   // segmentation frequency weight
    private String domain;                  // owning knowledge-graph domain (nullable)
    private Boolean enabled;                // soft-delete flag
    private String source;                  // provenance, e.g. "manual"
    private java.sql.Timestamp createdAt;   // row creation time
    private java.sql.Timestamp updatedAt;   // last modification time

    public Integer getId() { return id; }
    public void setId(Integer id) { this.id = id; }

    public String getWord() { return word; }
    public void setWord(String word) { this.word = word; }

    public String getNature() { return nature; }
    public void setNature(String nature) { this.nature = nature; }

    public Integer getFreq() { return freq; }
    public void setFreq(Integer freq) { this.freq = freq; }

    public String getDomain() { return domain; }
    public void setDomain(String domain) { this.domain = domain; }

    public Boolean getEnabled() { return enabled; }
    public void setEnabled(Boolean enabled) { this.enabled = enabled; }

    public String getSource() { return source; }
    public void setSource(String source) { this.source = source; }

    public java.sql.Timestamp getCreatedAt() { return createdAt; }
    public void setCreatedAt(java.sql.Timestamp createdAt) { this.createdAt = createdAt; }

    public java.sql.Timestamp getUpdatedAt() { return updatedAt; }
    public void setUpdatedAt(java.sql.Timestamp updatedAt) { this.updatedAt = updatedAt; }
}

View File

@ -14,4 +14,10 @@ public class GraphQuery{
private int matchType; private int matchType;
private int pageSize = 10; private int pageSize = 10;
private int pageIndex = 1; private int pageIndex = 1;
// 新增字段支持智能搜索
private String originalText; // 原始故障描述文本
private String[] keywords; // 提取的关键字数组
private Integer searchMode; // 搜索模式0=传统单关键字1=智能多关键字2=语义搜索
private Boolean enableKeywordExtraction; // 是否启用关键字提取
} }

View File

@ -0,0 +1,42 @@
package com.warmer.web.request;
import lombok.Data;
/**
* 智能搜索请求对象
*
* @author tanc
*/
@Data
public class IntelligentSearchRequest {
/**
* 输入的文本内容
*/
private String text;
/**
* 搜索的领域/图谱名称
*/
private String domain;
/**
* 搜索模式0=关键词匹配1=模糊匹配2=语义搜索
*/
private Integer searchMode = 0;
/**
* 是否返回关键词提取结果
*/
private Boolean includeKeywords = true;
/**
* 最大返回结果数量
*/
private Integer maxResults = 20;
/**
* 是否包含关系信息
*/
private Boolean includeRelations = true;
}

View File

@ -0,0 +1,14 @@
package com.warmer.web.service;
import com.warmer.web.model.CustomDictEntry;
import java.util.List;
/**
 * Service for user-defined HanLP dictionary entries: persistence plus
 * registration into HanLP's in-memory custom dictionary.
 */
public interface CustomDictService {
    /** Load every enabled entry into HanLP's custom dictionary; runs at most once per JVM. */
    void loadAllToHanLPIfNeeded();
    /** Re-register the enabled entries of one domain into HanLP's custom dictionary. */
    void refreshDomainToHanLP(String domain);
    /** List entries; either filter may be null to mean "no filter". */
    List<CustomDictEntry> list(String domain, Integer enabled);
    /** Persist a word and register it with HanLP; true on success, or when the word already exists. */
    boolean addWord(String word, String nature, Integer freq, String domain, String source);
    /** Soft-disable the entry with the given id; true when a row was updated. */
    boolean disable(Integer id);
    /** Whether a word already exists for the given domain. */
    boolean exists(String word, String domain);
}

View File

@ -0,0 +1,96 @@
package com.warmer.web.service;
import com.warmer.base.service.KeywordExtractionService;
import com.warmer.web.request.GraphQuery;
import java.util.*;
/**
 * Intelligent search service interface.
 *
 * NOTE(review): createEmptySearchResult / removeDuplicateNodes /
 * removeDuplicateRelations look like implementation helpers; consider
 * removing them from the public interface.
 *
 * @author tanc
 */
public interface IntelligentSearchService {
    /**
     * Intelligent search: query the knowledge graph directly from free text.
     *
     * @param text   input text
     * @param domain graph/domain to search
     * @return search result map (keys defined by the implementation)
     */
    Map<String, Object> intelligentSearch(String text, String domain);
    /**
     * Search graph nodes matching a single keyword.
     *
     * @param keyword keyword to match
     * @param domain  graph/domain to search
     * @return search result map
     */
    Map<String, Object> searchNodesByKeyword(String keyword, String domain);
    /**
     * Build an empty search-result map (used when there is nothing to return).
     *
     * @return empty search result
     */
    Map<String, Object> createEmptySearchResult();
    /**
     * Remove duplicate nodes from a result list.
     *
     * @param nodes node list
     * @return de-duplicated node list
     */
    List<Map<String, Object>> removeDuplicateNodes(List<Map<String, Object>> nodes);
    /**
     * Remove duplicate relations from a result list.
     *
     * @param relations relation list
     * @return de-duplicated relation list
     */
    List<Map<String, Object>> removeDuplicateRelations(List<Map<String, Object>> relations);
    /**
     * Pre-process a graph query for intelligent search (e.g. keyword extraction).
     *
     * @param query incoming query
     * @return the enriched query
     */
    GraphQuery processIntelligentSearch(GraphQuery query);
    /**
     * Keyword extraction details for front-end display.
     *
     * @param text raw input text
     * @return keyword info list
     */
    List<KeywordExtractionService.KeywordInfo> getKeywordExtractionResult(String text);
    /**
     * Keyword extraction details.
     * NOTE(review): same return type as getKeywordExtractionResult despite the
     * "Map" suffix — confirm intent or consolidate the two methods.
     *
     * @param text input text
     * @return keyword info list
     */
    List<KeywordExtractionService.KeywordInfo> getKeywordExtractionResultMap(String text);
    /**
     * Search suggestions (typeahead).
     *
     * @param query  partial query text
     * @param domain domain name
     * @return node-name suggestions
     */
    List<String> getSearchSuggestions(String query, String domain);
    /**
     * Extract keywords (simple variant returning only the words).
     *
     * @param text raw input text
     * @return keyword list
     */
    List<String> extractKeywords(String text);
}

View File

@ -24,6 +24,8 @@ public interface KGGraphService {
* @param domain * @param domain
*/ */
void deleteKGDomain(String domain); void deleteKGDomain(String domain);
// 新增重命名领域标签
void renameKGDomain(String oldDomain, String newDomain);
/** /**
* 查询图谱节点和关系 * 查询图谱节点和关系

View File

@ -0,0 +1,76 @@
package com.warmer.web.service.impl;
import com.hankcs.hanlp.dictionary.CustomDictionary;
import com.warmer.web.dao.CustomDictDao;
import com.warmer.web.model.CustomDictEntry;
import com.warmer.web.service.CustomDictService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Default CustomDictService backed by MyBatis persistence and HanLP's
 * in-memory CustomDictionary.
 */
@Service
public class CustomDictServiceImpl implements CustomDictService {

    @Autowired
    private CustomDictDao customDictDao;

    /** Guards the one-time bulk load into HanLP. */
    private final AtomicBoolean loaded = new AtomicBoolean(false);

    @Override
    public void loadAllToHanLPIfNeeded() {
        // compareAndSet ensures the bulk load runs at most once per JVM,
        // even under concurrent callers.
        if (loaded.compareAndSet(false, true)) {
            for (CustomDictEntry e : customDictDao.list(null, 1)) {
                registerToHanLP(e);
            }
        }
    }

    @Override
    public void refreshDomainToHanLP(String domain) {
        for (CustomDictEntry e : customDictDao.list(domain, 1)) {
            registerToHanLP(e);
        }
    }

    @Override
    public List<CustomDictEntry> list(String domain, Integer enabled) {
        return customDictDao.list(domain, enabled);
    }

    @Override
    public boolean addWord(String word, String nature, Integer freq, String domain, String source) {
        // Idempotent: an existing word counts as success.
        if (exists(word, domain)) return true;
        CustomDictEntry e = new CustomDictEntry();
        e.setWord(word);
        e.setNature(nature == null ? "n" : nature);
        e.setFreq(freq == null ? 1000 : freq);
        e.setDomain(domain);
        e.setEnabled(true);
        e.setSource(source == null ? "manual" : source);
        if (customDictDao.insert(e) <= 0) {
            return false;
        }
        registerToHanLP(e);
        return true;
    }

    @Override
    public boolean disable(Integer id) {
        return customDictDao.disable(id) > 0;
    }

    @Override
    public boolean exists(String word, String domain) {
        return customDictDao.getByWord(word, domain) != null;
    }

    /**
     * Register one entry with HanLP's in-memory dictionary, defaulting the
     * nature to "n" and the frequency to 1000 when absent. Previously this
     * logic was duplicated in three places.
     */
    private static void registerToHanLP(CustomDictEntry e) {
        try {
            String nature = e.getNature() == null ? "n" : e.getNature();
            int freq = e.getFreq() == null ? 1000 : e.getFreq();
            CustomDictionary.add(e.getWord(), nature + " " + freq);
        } catch (Exception ignored) {
            // Best effort: a bad entry must not break loading or adds.
        }
    }
}

View File

@ -0,0 +1,323 @@
package com.warmer.web.service.impl;
import com.warmer.base.service.KeywordExtractionService;
import com.warmer.base.util.Neo4jUtil;
import com.warmer.web.request.GraphQuery;
import com.warmer.web.service.IntelligentSearchService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Intelligent search service implementation.
 *
 * <p>Extracts keywords from free-form text (e.g. fault descriptions) via the
 * HanLP-backed {@link KeywordExtractionService}, then matches them against
 * nodes and relationships of the Neo4j knowledge graph for a given domain.
 *
 * @author tanc
 */
@Service
public class IntelligentSearchServiceImpl implements IntelligentSearchService {

    @Autowired
    private KeywordExtractionService keywordExtractionService;

    @Autowired
    private com.warmer.web.service.CustomDictService customDictService;

    /**
     * Stop words (Chinese + English). Hoisted to a constant so the set is not
     * rebuilt on every call. Currently only used by {@link #isStopWord}.
     */
    private static final Set<String> STOP_WORDS = new HashSet<>(Arrays.asList(
            "", "", "", "", "", "", "", "", "", "", "", "", "一个", "", "", "", "", "", "", "", "", "", "", "没有", "", "", "自己", "",
            "the", "a", "an", "and", "or", "but", "in", "on", "at", "to", "for", "of", "with", "by", "is", "are", "was", "were", "be", "been", "have", "has", "had", "do", "does", "did", "will", "would", "could", "should"
    ));

    /**
     * Full-text intelligent search: extract keywords from {@code text} and
     * look them up in the graph identified by {@code domain}.
     *
     * @param text   raw user input
     * @param domain graph label / domain name to search in
     * @return map with keys: success, originalText, domain, extractedKeywords,
     *         nodes, links, relationships, matchedCount (and message on error)
     */
    @Override
    public Map<String, Object> intelligentSearch(String text, String domain) {
        Map<String, Object> result = new HashMap<>();
        try {
            // 1. keyword extraction
            List<String> keywords = extractKeywords(text);
            // 2. graph lookup
            Map<String, Object> searchResult = searchInKnowledgeGraph(keywords, domain);
            // 3. assemble the response
            result.put("success", true);
            result.put("originalText", text);
            result.put("domain", domain);
            result.put("extractedKeywords", keywords);
            result.put("nodes", searchResult.get("nodes"));
            result.put("links", searchResult.get("relationships")); // field name the front end reads
            result.put("relationships", searchResult.get("relationships")); // kept for backward compatibility
            result.put("matchedCount", searchResult.get("matchedCount"));
        } catch (Exception e) {
            result.put("success", false);
            result.put("message", "搜索过程中发生错误: " + e.getMessage());
            result.put("originalText", text);
            result.put("domain", domain);
            result.put("nodes", new ArrayList<>());
            result.put("links", new ArrayList<>());
            result.put("relationships", new ArrayList<>());
        }
        return result;
    }

    /**
     * @return true when {@code word} is a common stop word.
     *         Retained for the fallback tokenizer (see git history); the
     *         production path delegates tokenizing to KeywordExtractionService.
     */
    private boolean isStopWord(String word) {
        return STOP_WORDS.contains(word.toLowerCase());
    }

    /**
     * Escapes a user-supplied value for embedding inside a single-quoted
     * Cypher string literal. Prevents broken queries / Cypher injection when
     * the value contains quotes or backslashes.
     */
    private static String escapeCypher(String value) {
        return value == null ? "" : value.replace("\\", "\\\\").replace("'", "\\'");
    }

    /** Escapes a label for embedding between backticks in a Cypher query. */
    private static String escapeLabel(String label) {
        return label == null ? "" : label.replace("`", "``");
    }

    /**
     * Builds the per-keyword query: matching nodes plus their one-hop
     * neighbourhood. Shared by intelligentSearch and searchNodesByKeyword.
     */
    private static String buildNodeSearchCypher(String keyword, String domain) {
        String safeDomain = escapeLabel(domain);
        return String.format(
                "MATCH (n:`%s`) WHERE n.name CONTAINS '%s' " +
                        "OPTIONAL MATCH (n)-[r]-(m:`%s`) " +
                        "RETURN n, r, m LIMIT 10",
                safeDomain, escapeCypher(keyword), safeDomain);
    }

    /**
     * Searches the knowledge graph for nodes whose name contains any of the
     * keywords, together with their direct relationships.
     *
     * @return map with keys "nodes", "relationships", "matchedCount"
     */
    private Map<String, Object> searchInKnowledgeGraph(List<String> keywords, String domain) {
        Map<String, Object> result = new HashMap<>();
        List<HashMap<String, Object>> allNodes = new ArrayList<>();
        List<HashMap<String, Object>> allRelationships = new ArrayList<>();
        Set<String> processedNodeIds = new HashSet<>();
        int matchedCount = 0;
        if (keywords.isEmpty() || StringUtils.isEmpty(domain)) {
            result.put("nodes", allNodes);
            result.put("relationships", allRelationships);
            result.put("matchedCount", matchedCount);
            return result;
        }
        try {
            // exact (CONTAINS) pass, one query per keyword
            for (String keyword : keywords) {
                HashMap<String, Object> searchResult =
                        Neo4jUtil.getGraphNodeAndShip(buildNodeSearchCypher(keyword, domain));
                if (searchResult == null) {
                    continue;
                }
                // Neo4jUtil exposes nodes under the "node" key
                @SuppressWarnings("unchecked")
                List<HashMap<String, Object>> nodes = (List<HashMap<String, Object>>) searchResult.get("node");
                if (nodes != null) {
                    for (HashMap<String, Object> node : nodes) {
                        String nodeId = String.valueOf(node.get("uuid"));
                        // Set.add is false for duplicates -> each node counted once
                        if (processedNodeIds.add(nodeId)) {
                            allNodes.add(node);
                            matchedCount++;
                        }
                    }
                }
                // Neo4jUtil exposes relationships under the "relationship" key
                @SuppressWarnings("unchecked")
                List<HashMap<String, Object>> relationships = (List<HashMap<String, Object>>) searchResult.get("relationship");
                if (relationships != null) {
                    allRelationships.addAll(relationships);
                }
            }
            // fallback: no exact hit — try one broader regex query over all keywords
            if (allNodes.isEmpty()) {
                // NOTE(review): regex metacharacters in keywords are not quoted
                // (pre-existing behaviour); quotes/backslashes are escaped.
                String keywordPattern = escapeCypher(String.join("|", keywords));
                String cypherSql = String.format(
                        "MATCH (n:`%s`) WHERE n.name =~ '.*(%s).*' " +
                                "RETURN n LIMIT 20",
                        escapeLabel(domain), keywordPattern);
                List<HashMap<String, Object>> nodes = Neo4jUtil.getGraphNode(cypherSql);
                if (nodes != null) {
                    allNodes.addAll(nodes);
                    matchedCount = nodes.size();
                }
            }
        } catch (Exception e) {
            // best-effort: return whatever was gathered; TODO replace with SLF4J logging
            System.err.println("搜索知识图谱时发生错误: " + e.getMessage());
        }
        // de-duplicate relationships by uuid, keeping the first occurrence
        Map<Object, HashMap<String, Object>> uniqueRels = new LinkedHashMap<>();
        for (HashMap<String, Object> rel : allRelationships) {
            uniqueRels.putIfAbsent(rel.get("uuid"), rel);
        }
        result.put("nodes", allNodes);
        result.put("relationships", new ArrayList<>(uniqueRels.values()));
        result.put("matchedCount", matchedCount);
        return result;
    }

    /**
     * Searches nodes (and their one-hop neighbourhood) for a single keyword.
     *
     * @param keyword keyword to match with CONTAINS
     * @param domain  graph domain; when blank an empty result is returned
     *                (cross-domain search is not implemented yet)
     */
    @Override
    public Map<String, Object> searchNodesByKeyword(String keyword, String domain) {
        try {
            if (domain == null || domain.trim().isEmpty()) {
                return createEmptySearchResult();
            }
            return Neo4jUtil.getGraphNodeAndShip(buildNodeSearchCypher(keyword, domain));
        } catch (Exception e) {
            System.err.println("搜索节点时出错: " + e.getMessage());
            return createEmptySearchResult();
        }
    }

    /** @return an empty result shell with "nodes" and "relationships" lists */
    @Override
    public Map<String, Object> createEmptySearchResult() {
        Map<String, Object> result = new HashMap<>();
        result.put("nodes", new ArrayList<>());
        result.put("relationships", new ArrayList<>());
        return result;
    }

    /** De-duplicates nodes by uuid, preserving first-seen order. */
    @Override
    public List<Map<String, Object>> removeDuplicateNodes(List<Map<String, Object>> nodes) {
        Map<String, Map<String, Object>> uniqueNodes = new LinkedHashMap<>();
        for (Map<String, Object> node : nodes) {
            uniqueNodes.putIfAbsent(String.valueOf(node.get("uuid")), node);
        }
        return new ArrayList<>(uniqueNodes.values());
    }

    /** De-duplicates relations by (sourceId, targetId, relation), preserving order. */
    @Override
    public List<Map<String, Object>> removeDuplicateRelations(List<Map<String, Object>> relations) {
        Map<String, Map<String, Object>> uniqueRelations = new LinkedHashMap<>();
        for (Map<String, Object> relation : relations) {
            String relationKey = relation.get("sourceId") + "-" + relation.get("targetId") + "-" + relation.get("relation");
            uniqueRelations.putIfAbsent(relationKey, relation);
        }
        return new ArrayList<>(uniqueRelations.values());
    }

    /**
     * Enriches a search request: extracts keywords from its original text and
     * stores them on the request object. Returns the same instance.
     */
    @Override
    public GraphQuery processIntelligentSearch(GraphQuery query) {
        if (query != null && query.getOriginalText() != null) {
            List<String> keywords = extractKeywords(query.getOriginalText());
            if (keywords != null && !keywords.isEmpty()) {
                query.setKeywords(keywords.toArray(new String[0]));
            }
        }
        return query;
    }

    /** Delegates to the extraction service for detailed keyword info. */
    @Override
    public List<KeywordExtractionService.KeywordInfo> getKeywordExtractionResult(String text) {
        return keywordExtractionService.extractKeywordsWithInfo(text);
    }

    /**
     * Same as {@link #getKeywordExtractionResult(String)}; kept because it is
     * part of the public interface.
     */
    @Override
    public List<KeywordExtractionService.KeywordInfo> getKeywordExtractionResultMap(String text) {
        return keywordExtractionService.extractKeywordsWithInfo(text);
    }

    /**
     * Extracts keywords from free text, making sure the DB-backed custom
     * dictionary has been loaded into HanLP first (one-time, lazy).
     */
    @Override
    public List<String> extractKeywords(String text) {
        customDictService.loadAllToHanLPIfNeeded();
        return keywordExtractionService.extractKeywords(text);
    }

    /**
     * Returns up to 10 distinct node names containing {@code query}, for
     * autocomplete suggestions.
     */
    @Override
    public List<String> getSearchSuggestions(String query, String domain) {
        List<String> suggestions = new ArrayList<>();
        if (query == null || query.trim().isEmpty() || domain == null || domain.trim().isEmpty()) {
            return suggestions;
        }
        try {
            String cypherSql = String.format(
                    "MATCH (n:`%s`) WHERE n.name CONTAINS '%s' " +
                            "RETURN DISTINCT n.name as name LIMIT 10",
                    escapeLabel(domain), escapeCypher(query.trim()));
            List<HashMap<String, Object>> results = Neo4jUtil.getGraphTable(cypherSql);
            if (results != null) {
                for (HashMap<String, Object> row : results) {
                    Object nameObj = row.get("name");
                    if (nameObj != null) {
                        // strip surrounding quotes the driver may include
                        String name = nameObj.toString().replaceAll("^\"|\"$", "");
                        if (!name.isEmpty()) {
                            suggestions.add(name);
                        }
                    }
                }
            }
        } catch (Exception e) {
            System.err.println("获取搜索建议时出错: " + e.getMessage());
        }
        return suggestions;
    }
}

View File

@ -54,7 +54,11 @@ public class KGGraphServiceImpl implements KGGraphService {
public void deleteKGDomain(String domain) { public void deleteKGDomain(String domain) {
kgRepository.deleteKgDomain(domain); kgRepository.deleteKgDomain(domain);
} }
// 新增重命名领域标签
@Override
public void renameKGDomain(String oldDomain, String newDomain) {
kgRepository.renameKgDomain(oldDomain, newDomain);
}
@Override @Override
public HashMap<String, Object> queryGraphResult(GraphQuery query) { public HashMap<String, Object> queryGraphResult(GraphQuery query) {
return kgRepository.queryGraphResult(query); return kgRepository.queryGraphResult(query);

View File

@ -1,40 +0,0 @@
#本配置文件中的路径的根目录,根目录+其他路径=完整路径支持相对路径请参考https://github.com/hankcs/HanLP/pull/254
#Windows用户请注意路径分隔符统一使用/
root=D:/HanLP/
#核心词典路径
CoreDictionaryPath=data/dictionary/CoreNatureDictionary.txt
#2元语法词典路径
BiGramDictionaryPath=data/dictionary/CoreNatureDictionary.ngram.txt
#停用词词典路径
CoreStopWordDictionaryPath=data/dictionary/stopwords.txt
#同义词词典路径
CoreSynonymDictionaryDictionaryPath=data/dictionary/synonym/CoreSynonym.txt
#人名词典路径
PersonDictionaryPath=data/dictionary/person/nr.txt
#人名词典转移矩阵路径
PersonDictionaryTrPath=data/dictionary/person/nr.tr.txt
#繁简词典根目录
tcDictionaryRoot=data/dictionary/tc
#自定义词典路径,用;隔开多个自定义词典,空格开头表示在同一个目录,使用“文件名 词性”形式则表示这个词典的词性默认是该词性。优先级递减。
#另外data/dictionary/custom/CustomDictionary.txt是个高质量的词库请不要删除。所有词典统一使用【UTF-8】编码。
#注意每次更新自己定义的新词典myDict.txt的内容时,要删除同目录下的词典缓存文件CustomDictionary.txt.bin
CustomDictionaryPath=data/dictionary/custom/CustomDictionary.txt; 现代汉语补充词库.txt; 全国地名大全.txt ns; 人名词典.txt; 机构名词典.txt; 上海地名.txt ns;data/dictionary/person/nrf.txt nrf;
#CRF分词模型路径
CRFSegmentModelPath=data/model/segment/CRFSegmentModel.txt
#HMM分词模型
HMMSegmentModelPath=data/model/segment/HMMSegmentModel.bin
#分词结果是否展示词性
ShowTermNature=true
#IO适配器实现com.hankcs.hanlp.corpus.io.IIOAdapter接口以在不同的平台Hadoop、Redis等上运行HanLP
#默认的IO适配器如下该适配器是基于普通文件系统的。
#IOAdapter=com.hankcs.hanlp.corpus.io.FileIOAdapter
#感知机词法分析器
PerceptronCWSModelPath=data/model/perceptron/pku199801/cws.bin
PerceptronPOSModelPath=data/model/perceptron/pku199801/pos.bin
PerceptronNERModelPath=data/model/perceptron/pku199801/ner.bin
#CRF词法分析器
CRFCWSModelPath=data/model/crf/pku199801/cws.bin
CRFPOSModelPath=data/model/crf/pku199801/pos.bin
CRFNERModelPath=data/model/crf/pku199801/ner.bin
#更多配置项请参考 https://github.com/hankcs/HanLP/blob/master/src/main/java/com/hankcs/hanlp/HanLP.java#L59 自行添加

View File

@ -1,244 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.warmer.kgbuilder.dao.CorpusDao">
<!-- 语料库文档结果映射 -->
<resultMap id="CorpusDocumentResultMap" type="com.warmer.kgbuilder.domain.CorpusDocument">
<id property="id" column="id" jdbcType="INTEGER"/>
<result property="domainId" column="domain_id" jdbcType="INTEGER"/>
<result property="title" column="title" jdbcType="VARCHAR"/>
<result property="content" column="content" jdbcType="LONGVARCHAR"/>
<result property="documentType" column="document_type" jdbcType="VARCHAR"/>
<result property="source" column="source" jdbcType="VARCHAR"/>
<result property="status" column="status" jdbcType="INTEGER"/>
<result property="createUser" column="create_user" jdbcType="VARCHAR"/>
<result property="createTime" column="create_time" jdbcType="TIMESTAMP"/>
<result property="updateUser" column="update_user" jdbcType="VARCHAR"/>
<result property="updateTime" column="update_time" jdbcType="TIMESTAMP"/>
</resultMap>
<!-- 关键词权重结果映射 -->
<resultMap id="KeywordWeightResultMap" type="com.warmer.kgbuilder.domain.KeywordWeight">
<id property="id" column="id" jdbcType="INTEGER"/>
<result property="domainId" column="domain_id" jdbcType="INTEGER"/>
<result property="keyword" column="keyword" jdbcType="VARCHAR"/>
<result property="weight" column="weight" jdbcType="DECIMAL"/>
<result property="category" column="category" jdbcType="VARCHAR"/>
<result property="description" column="description" jdbcType="VARCHAR"/>
<result property="status" column="status" jdbcType="INTEGER"/>
<result property="createUser" column="create_user" jdbcType="VARCHAR"/>
<result property="createTime" column="create_time" jdbcType="TIMESTAMP"/>
<result property="updateUser" column="update_user" jdbcType="VARCHAR"/>
<result property="updateTime" column="update_time" jdbcType="TIMESTAMP"/>
</resultMap>
<!-- 词频统计结果映射 -->
<resultMap id="TermFrequencyResultMap" type="com.warmer.kgbuilder.domain.TermFrequency">
<id property="id" column="id" jdbcType="BIGINT"/>
<result property="domainId" column="domain_id" jdbcType="INTEGER"/>
<result property="term" column="term" jdbcType="VARCHAR"/>
<result property="documentFrequency" column="document_frequency" jdbcType="INTEGER"/>
<result property="totalFrequency" column="total_frequency" jdbcType="INTEGER"/>
<result property="idfValue" column="idf_value" jdbcType="DECIMAL"/>
<result property="lastUpdate" column="last_update" jdbcType="TIMESTAMP"/>
</resultMap>
<!-- ========== 语料库文档查询 ========== -->
<select id="queryDocumentsByDomain" resultMap="CorpusDocumentResultMap">
SELECT * FROM kg_corpus_document
WHERE domain_id = #{domainId} AND status = 1
ORDER BY create_time DESC
</select>
<select id="queryDocumentsByType" resultMap="CorpusDocumentResultMap">
SELECT * FROM kg_corpus_document
WHERE domain_id = #{domainId} AND document_type = #{documentType} AND status = 1
ORDER BY create_time DESC
</select>
<select id="queryAllActiveDocuments" resultMap="CorpusDocumentResultMap">
SELECT * FROM kg_corpus_document
WHERE status = 1
ORDER BY domain_id, create_time DESC
</select>
<select id="queryDocumentById" resultMap="CorpusDocumentResultMap">
SELECT * FROM kg_corpus_document WHERE id = #{id}
</select>
<insert id="insertDocument" parameterType="com.warmer.kgbuilder.domain.CorpusDocument">
INSERT INTO kg_corpus_document (
domain_id, title, content, document_type, source, status,
create_user, create_time, update_user, update_time
) VALUES (
#{domainId}, #{title}, #{content}, #{documentType}, #{source}, #{status},
#{createUser}, #{createTime}, #{updateUser}, #{updateTime}
)
</insert>
<insert id="batchInsertDocuments">
INSERT INTO kg_corpus_document (
domain_id, title, content, document_type, source, status,
create_user, create_time, update_user, update_time
) VALUES
<foreach collection="documents" item="doc" separator=",">
(#{doc.domainId}, #{doc.title}, #{doc.content}, #{doc.documentType}, #{doc.source}, #{doc.status},
#{doc.createUser}, #{doc.createTime}, #{doc.updateUser}, #{doc.updateTime})
</foreach>
</insert>
<update id="updateDocument" parameterType="com.warmer.kgbuilder.domain.CorpusDocument">
UPDATE kg_corpus_document SET
title = #{title},
content = #{content},
document_type = #{documentType},
source = #{source},
status = #{status},
update_user = #{updateUser},
update_time = #{updateTime}
WHERE id = #{id}
</update>
<delete id="deleteDocument">
DELETE FROM kg_corpus_document WHERE id = #{id}
</delete>
<!-- ========== 关键词权重查询 ========== -->
<select id="queryKeywordWeightsByDomain" resultMap="KeywordWeightResultMap">
SELECT * FROM kg_keyword_weight
WHERE (domain_id = #{domainId} OR domain_id IS NULL) AND status = 1
ORDER BY weight DESC, keyword
</select>
<select id="queryGlobalKeywordWeights" resultMap="KeywordWeightResultMap">
SELECT * FROM kg_keyword_weight
WHERE domain_id IS NULL AND status = 1
ORDER BY weight DESC, keyword
</select>
<select id="queryKeywordWeightsByCategory" resultMap="KeywordWeightResultMap">
SELECT * FROM kg_keyword_weight
WHERE (domain_id = #{domainId} OR domain_id IS NULL)
AND category = #{category} AND status = 1
ORDER BY weight DESC, keyword
</select>
<select id="queryKeywordWeight" resultMap="KeywordWeightResultMap">
SELECT * FROM kg_keyword_weight
WHERE (domain_id = #{domainId} OR domain_id IS NULL)
AND keyword = #{keyword} AND status = 1
ORDER BY domain_id DESC LIMIT 1
</select>
<insert id="insertKeywordWeight" parameterType="com.warmer.kgbuilder.domain.KeywordWeight">
INSERT INTO kg_keyword_weight (
domain_id, keyword, weight, category, description, status,
create_user, create_time, update_user, update_time
) VALUES (
#{domainId}, #{keyword}, #{weight}, #{category}, #{description}, #{status},
#{createUser}, #{createTime}, #{updateUser}, #{updateTime}
)
</insert>
<insert id="batchInsertKeywordWeights">
INSERT INTO kg_keyword_weight (
domain_id, keyword, weight, category, description, status,
create_user, create_time, update_user, update_time
) VALUES
<foreach collection="weights" item="weight" separator=",">
(#{weight.domainId}, #{weight.keyword}, #{weight.weight}, #{weight.category}, #{weight.description}, #{weight.status},
#{weight.createUser}, #{weight.createTime}, #{weight.updateUser}, #{weight.updateTime})
</foreach>
</insert>
<update id="updateKeywordWeight" parameterType="com.warmer.kgbuilder.domain.KeywordWeight">
UPDATE kg_keyword_weight SET
weight = #{weight},
category = #{category},
description = #{description},
status = #{status},
update_user = #{updateUser},
update_time = #{updateTime}
WHERE id = #{id}
</update>
<delete id="deleteKeywordWeight">
DELETE FROM kg_keyword_weight WHERE id = #{id}
</delete>
<!-- ========== 停用词查询 ========== -->
<select id="queryStopWordsByDomain" resultType="String">
SELECT word FROM kg_stop_words
WHERE (domain_id = #{domainId} OR domain_id IS NULL) AND status = 1
ORDER BY word
</select>
<select id="queryGlobalStopWords" resultType="String">
SELECT word FROM kg_stop_words
WHERE domain_id IS NULL AND status = 1
ORDER BY word
</select>
<!-- ========== 词频统计查询 ========== -->
<select id="queryTermFrequenciesByDomain" resultMap="TermFrequencyResultMap">
SELECT * FROM kg_term_frequency
WHERE domain_id = #{domainId}
ORDER BY document_frequency DESC, term
</select>
<select id="queryTermFrequency" resultMap="TermFrequencyResultMap">
SELECT * FROM kg_term_frequency
WHERE domain_id = #{domainId} AND term = #{term}
</select>
<insert id="insertTermFrequency" parameterType="com.warmer.kgbuilder.domain.TermFrequency">
INSERT INTO kg_term_frequency (
domain_id, term, document_frequency, total_frequency, idf_value, last_update
) VALUES (
#{domainId}, #{term}, #{documentFrequency}, #{totalFrequency}, #{idfValue}, #{lastUpdate}
)
</insert>
<insert id="batchInsertTermFrequencies">
INSERT INTO kg_term_frequency (
domain_id, term, document_frequency, total_frequency, idf_value, last_update
) VALUES
<foreach collection="frequencies" item="freq" separator=",">
(#{freq.domainId}, #{freq.term}, #{freq.documentFrequency}, #{freq.totalFrequency}, #{freq.idfValue}, #{freq.lastUpdate})
</foreach>
</insert>
<update id="updateTermFrequency" parameterType="com.warmer.kgbuilder.domain.TermFrequency">
UPDATE kg_term_frequency SET
document_frequency = #{documentFrequency},
total_frequency = #{totalFrequency},
idf_value = #{idfValue},
last_update = #{lastUpdate}
WHERE id = #{id}
</update>
<update id="batchUpdateTermFrequencies">
<foreach collection="frequencies" item="freq" separator=";">
UPDATE kg_term_frequency SET
document_frequency = #{freq.documentFrequency},
total_frequency = #{freq.totalFrequency},
idf_value = #{freq.idfValue},
last_update = #{freq.lastUpdate}
WHERE id = #{freq.id}
</foreach>
</update>
<delete id="deleteTermFrequency">
DELETE FROM kg_term_frequency WHERE id = #{id}
</delete>
<select id="countDocumentsByDomain" resultType="int">
SELECT COUNT(*) FROM kg_corpus_document
WHERE domain_id = #{domainId} AND status = 1
</select>
</mapper>

View File

@ -0,0 +1,46 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
        "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<!-- Mapper for kg_custom_dict: user-defined HanLP dictionary entries. -->
<mapper namespace="com.warmer.web.dao.CustomDictDao">
    <resultMap id="CustomDictMap" type="com.warmer.web.model.CustomDictEntry">
        <id property="id" column="id"/>
        <result property="word" column="word"/>
        <result property="nature" column="nature"/>
        <result property="freq" column="freq"/>
        <result property="domain" column="domain"/>
        <result property="enabled" column="enabled"/>
        <result property="source" column="source"/>
        <result property="createdAt" column="created_at"/>
        <result property="updatedAt" column="updated_at"/>
    </resultMap>

    <!-- Lists entries, optionally filtered by domain and/or enabled flag.
         <where> emits WHERE only when a filter applies and strips the leading AND,
         replacing the old "WHERE 1=1" workaround. -->
    <select id="list" resultMap="CustomDictMap">
        SELECT * FROM kg_custom_dict
        <where>
            <if test="domain != null and domain != ''">
                AND domain = #{domain}
            </if>
            <if test="enabled != null">
                AND enabled = #{enabled}
            </if>
        </where>
        ORDER BY updated_at DESC
    </select>

    <!-- Looks up a single word, optionally scoped to a domain. -->
    <select id="getByWord" resultMap="CustomDictMap">
        SELECT * FROM kg_custom_dict
        WHERE word = #{word}
        <if test="domain != null and domain != ''">
            AND domain = #{domain}
        </if>
        LIMIT 1
    </select>

    <!-- Inserts a new entry; the generated primary key is written back to id. -->
    <insert id="insert" parameterType="com.warmer.web.model.CustomDictEntry" useGeneratedKeys="true" keyProperty="id">
        INSERT INTO kg_custom_dict (word, nature, freq, domain, enabled, source)
        VALUES (#{word}, #{nature}, #{freq}, #{domain}, #{enabled}, #{source})
    </insert>

    <!-- Soft-delete: disabled entries are skipped by enabled=1 listings. -->
    <update id="disable">
        UPDATE kg_custom_dict SET enabled = 0 WHERE id = #{id}
    </update>
</mapper>

View File

@ -50,7 +50,7 @@
<select id="getDomainByLabel" resultMap="BaseResultMap"> <select id="getDomainByLabel" resultMap="BaseResultMap">
SELECT SELECT
<include refid="Base_Column_List" /> <include refid="Base_Column_List" />
FROM kg_domain where status=1 and `label`=#{label} order by createTime desc FROM kg_domain where `label`=#{label} order by createTime desc LIMIT 1
</select> </select>
<select id="getDomainList" resultMap="BaseResultMap" > <select id="getDomainList" resultMap="BaseResultMap" >
SELECT SELECT
@ -96,6 +96,7 @@
name =#{name} , name =#{name} ,
nodeCount=#{nodeCount}, nodeCount=#{nodeCount},
shipCount=#{shipCount}, shipCount=#{shipCount},
status=#{status},
modifyUser=#{modifyUser} , modifyUser=#{modifyUser} ,
modifyTime=now() modifyTime=now()
WHERE id = #{id} WHERE id = #{id}

View File

@ -1,4 +1,2 @@
import kgBuilderApi from "./modules/kgBuilderApi"; export { default as kgBuilderApi } from "./modules/kgBuilderApi";
import datasourceApi from "./modules/datasourceApi"; export { default as datasourceApi } from "./modules/datasourceApi";
export { kgBuilderApi, datasourceApi };

View File

@ -85,6 +85,16 @@ class kgBuilderApi extends BaseAPI{
deleteDomain(data) { deleteDomain(data) {
return this.post("/deleteDomain",data); return this.post("/deleteDomain",data);
} }
renameDomain(data) {
return this.put("/kg/domain/rename", data);
}
renameDomain(data) {
return this.put("/kg/domain/rename", data, {
headers: {
'Content-Type': 'application/json'
}
});
}
getRecommendGraph(data) { getRecommendGraph(data) {
return this.post("/getRecommendGraph",data,{ return this.post("/getRecommendGraph",data,{
headers: { headers: {
@ -149,6 +159,21 @@ class kgBuilderApi extends BaseAPI{
download(data) { download(data) {
return this.get("/download/"+data,); return this.get("/download/"+data,);
} }
intelligentSearch(data) {
return this.post("/intelligent/search", data, {
headers: {
'Content-Type': 'application/json'
}
});
}
getSearchSuggestions(query, domain) {
return this.get("/intelligent/suggestions", {
query,
domain
});
}
updateCoordinateOfNode(data) { updateCoordinateOfNode(data) {
return this.post("/updateCoordinateOfNode",data,{ return this.post("/updateCoordinateOfNode",data,{
headers: { headers: {

Binary file not shown.

Before

Width:  |  Height:  |  Size: 115 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 513 KiB

View File

@ -51,9 +51,9 @@
<span>{{ item.工具书名称 }}</span> <span>{{ item.工具书名称 }}</span>
</div> </div>
</div> </div>
<span class="datacontent" @click="linkto(item)" v-html="item.快照"> <span class="datacontent" @click="linkto(item)" v-html="item.快照"></span>
{{ item.快照 }} {{ item.快照 }}
</span>
</div> </div>
</div> </div>
</div> </div>

View File

@ -109,7 +109,12 @@ export default {
}, },
widht: null, widht: null,
height: null, height: null,
isAddLink: false isAddLink: false,
//
orbitAnimation: false,
orbitSpeed: 0.002,
orbitAngle: 0,
orbitTimer: null
}; };
}, },
watch: { watch: {
@ -142,6 +147,13 @@ export default {
_this.initGraph(); _this.initGraph();
}); });
}, },
beforeDestroy() {
//
if (this.orbitTimer) {
cancelAnimationFrame(this.orbitTimer);
this.orbitTimer = null;
}
},
methods: { methods: {
// //
initContainerRightClick(event) { initContainerRightClick(event) {
@ -220,8 +232,8 @@ export default {
// //
updateGraph() { updateGraph() {
const _this = this; const _this = this;
const lks = this.graph.links; const lks = this.graph.links || []; // undefined
const nodes = this.graph.nodes; const nodes = this.graph.nodes || []; // undefined
const links = []; const links = [];
nodes.forEach(function(n) { nodes.forEach(function(n) {
if (n.center === 1 || n.center === "1") { if (n.center === 1 || n.center === "1") {
@ -1237,6 +1249,194 @@ export default {
}, },
changeCursor() { changeCursor() {
d3.select(".BOX-SVG").style("cursor", "crosshair"); // d3.select(".BOX-SVG").style("cursor", "crosshair"); //
},
// Toggles the orbit ("rotate around centre") animation on/off and returns
// the new state so the caller can sync its UI toggle.
toggleOrbitAnimation() {
  this.orbitAnimation = !this.orbitAnimation;
  if (this.orbitAnimation) {
    this.orbitAngle = 0; // restart the sweep from angle 0
    // keep the d3 force simulation warm while animating
    this.simulation.alphaTarget(0.3).restart();
    // kick off the requestAnimationFrame loop
    this.startOrbitLoop();
  } else {
    this.simulation.alphaTarget(0).restart();
    // stop the animation-frame loop
    if (this.orbitTimer) {
      cancelAnimationFrame(this.orbitTimer);
      this.orbitTimer = null;
    }
    // put nodes back on their pre-animation fixed coordinates
    this.restoreNodeFixedState();
  }
  return this.orbitAnimation;
},
//
restoreNodeFixedState() {
this.graph.nodes.forEach(function(d) {
if (d._originalFx !== undefined) {
d.fx = d._originalFx;
d.fy = d._originalFy;
delete d._originalFx;
delete d._originalFy;
}
});
},
// Starts the orbit animation loop: every frame, rotates all non-centre nodes
// around the canvas centre at this.orbitSpeed radians/frame, keeping each
// node's original distance from the centre.
startOrbitLoop() {
  const _this = this;
  const centerX = this.width / 2;
  const centerY = this.height / 2;
  // per-node snapshot of (distance, starting angle) relative to the centre
  const nodeInitials = [];
  // does the graph have an explicitly marked centre node (center === 1)?
  let hasCenterNode = false;
  _this.graph.nodes.forEach(function(d) {
    if (d.center === 1 || d.center === "1") {
      hasCenterNode = true;
    }
  });
  // no marked centre: pick the node geometrically closest to the canvas centre
  let centerNode = null;
  if (!hasCenterNode && _this.graph.nodes.length > 0) {
    let minDistance = Infinity;
    _this.graph.nodes.forEach(function(d) {
      const dx = d.x - centerX;
      const dy = d.y - centerY;
      const distance = Math.sqrt(dx * dx + dy * dy);
      if (distance < minDistance) {
        minDistance = distance;
        centerNode = d;
      }
    });
    console.log('未找到标记的中心节点,自动选择最接近中心的节点:', centerNode ? centerNode.name : 'null');
  }
  _this.graph.nodes.forEach(function(d) {
    // back up any existing fixed position so it can be restored on stop
    // (see restoreNodeFixedState)
    if (!d._originalFx) {
      d._originalFx = d.fx;
      d._originalFy = d.fy;
    }
    // the centre node itself stays put; everything else orbits
    const isCenterNode = (d.center === 1 || d.center === "1") || (centerNode && d.uuid === centerNode.uuid);
    if (!isCenterNode) {
      // record the node's polar coordinates relative to the canvas centre
      const dx = d.x - centerX;
      const dy = d.y - centerY;
      const distance = Math.sqrt(dx * dx + dy * dy);
      const angle = Math.atan2(dy, dx);
      nodeInitials.push({
        node: d,
        distance: distance,
        initialAngle: angle
      });
    } else {
      console.log('中心节点:', d.name || d.uuid);
    }
  });
  console.log('启动旋转动画,节点总数:', _this.graph.nodes.length, '可旋转节点数:', nodeInitials.length);
  // one animation frame: advance the global angle and reposition every node
  function animate() {
    if (!_this.orbitAnimation) return; // stop flag flipped by toggleOrbitAnimation
    _this.orbitAngle += _this.orbitSpeed;
    nodeInitials.forEach(function(item) {
      const newAngle = item.initialAngle + _this.orbitAngle;
      item.node.x = centerX + item.distance * Math.cos(newAngle);
      item.node.y = centerY + item.distance * Math.sin(newAngle);
    });
    // push the new coordinates into the SVG
    _this.updateNodePositions();
    // schedule the next frame; handle kept so stop can cancel it
    _this.orbitTimer = requestAnimationFrame(animate);
  }
  this.orbitTimer = requestAnimationFrame(animate);
},
// Syncs the SVG elements (circles, labels, buttons, symbols, link paths and
// link labels) with the current x/y of every node, without re-running the
// d3 force simulation. Called once per orbit-animation frame.
updateNodePositions() {
  const _this = this;
  // node circles — positioned via cx/cy inside the node group
  this.nodeGroup.selectAll("circle")
    .attr("cx", function(d) {
      return d.x;
    })
    .attr("cy", function(d) {
      return d.y;
    });
  // node name labels follow the node centre
  this.nodeTextGroup.selectAll("text")
    .attr("x", function(d) {
      return d.x;
    })
    .attr("y", function(d) {
      return d.y;
    });
  // per-node button groups translate with the node
  this.nodeButtonGroup.selectAll("g")
    .attr("transform", function(d) {
      return "translate(" + d.x + "," + d.y + ") scale(1)";
    });
  // status symbols sit at a fixed offset (+8, -30) from the node
  this.nodeSymbolGroup.selectAll("path")
    .attr("transform", function(d) {
      return "translate(" + (d.x + 8) + "," + (d.y - 30) + ") scale(1)";
    });
  // link paths: arc radius spreads parallel links between the same node pair
  this.linkGroup.selectAll("path").attr("d", function(d) {
    const dx = d.target.x - d.source.x;
    const dy = d.target.y - d.source.y;
    const dr = Math.sqrt(dx * dx + dy * dy);
    const unevenCorrection = d.sameUneven ? 0 : 0.5;
    const curvature = 2;
    let arc = (1.0 / curvature) * ((dr * d.maxSameHalf) / (d.sameIndexCorrected - unevenCorrection));
    if (d.sameMiddleLink) {
      arc = 0; // the middle link of a bundle stays straight
    }
    return "M" + d.source.x + "," + d.source.y + "A" + arc + "," + arc + " 0 0," + d.sameArcDirection + " " + d.target.x + "," + d.target.y;
  });
  // link labels: flip 180° when the link points right-to-left so the text
  // stays readable ("this" inside attr is the DOM element, hence getBBox)
  const linkTextList = this.linkTextGroup.selectAll("g");
  linkTextList.attr("transform", function(d) {
    if (d.target.x < d.source.x) {
      const bbox = this.getBBox();
      const rx = bbox.x + bbox.width / 2;
      const ry = bbox.y + bbox.height / 2;
      return "rotate(180 " + rx + " " + ry + ")";
    } else {
      return "rotate(360)";
    }
  });
  // centre the label on the link's midpoint
  linkTextList.select("text")
    .attr("x", function(d) {
      return (d.source.x + d.target.x) / 2;
    })
    .attr("y", function(d) {
      return (d.source.y + d.target.y) / 2;
    });
},
// Sets the orbit animation's angular speed (radians per animation frame).
setOrbitSpeed(speed) {
  this.orbitSpeed = speed;
} }
} }
}; };

View File

@ -7,18 +7,15 @@
--> -->
<template> <template>
<div id="follow-us" class="guanzhu" style="padding: 20px;"> <div id="follow-us" class="guanzhu" style="padding: 20px;">
<h2 class="hometitle">中国一重</h2> <h2 class="hometitle"></h2>
<ul> <ul>
<li class="wx"> <li class="wx">
<img <img
src="@/assets/cfhi.png" src="@/assets/中国一重.png" style="width: 100%;" />
alt="中国一重logo"
/>
</li> </li>
</ul> </ul>
</div> </div>
</template> </template>

View File

@ -8,7 +8,7 @@
<template> <template>
<div class="menu"> <div class="menu">
<nav class="nav" id="topnav"> <nav class="nav" id="topnav">
<h1 class="logo"><a href="/">֪KGBulider</a></h1> <h1 class="logo"><a href="/">中国一重</a></h1>
<ul style="float: left;margin-left: 60px;padding: 10px;"> <ul style="float: left;margin-left: 60px;padding: 10px;">
<template v-for="nav in navList"> <template v-for="nav in navList">
<li @mouseover="selectStyle(nav)" > <li @mouseover="selectStyle(nav)" >
@ -36,9 +36,7 @@ export default {
}, },
data() { data() {
return { return {
navList: [ navList: []
]
}; };
}, },
mounted() {}, mounted() {},

View File

@ -21,4 +21,19 @@ export default class BaseAPI {
} }
return request(Object.assign({ url, method: 'POST', data: temp }, config)) return request(Object.assign({ url, method: 'POST', data: temp }, config))
} }
put(url, data, config) {
let temp
if (
config &&
config?.headers &&
(config?.headers['Content-Type'].indexOf('application/json') !== -1 ||
config?.headers['Content-Type'].indexOf('multipart/form-data') !== -1)
) {
temp = data
} else {
temp = qs.stringify(data)
}
return request(Object.assign({ url, method: 'PUT', data: temp }, config))
}
} }

View File

@ -258,7 +258,8 @@
</el-form-item> </el-form-item>
</el-form> </el-form>
</div> </div>
</div>
<!--添加同级--> <!--添加同级-->
<div v-show="operate == 'batchAddSame'" class="pd-20"> <div v-show="operate == 'batchAddSame'" class="pd-20">
<div class="mb-l">添加同级</div> <div class="mb-l">添加同级</div>

View File

@ -30,6 +30,7 @@
:title="m.name" :title="m.name"
> >
<el-tag <el-tag
@contextmenu.native.prevent="deleteGraph(m)"
v-if="m.commend == 0" v-if="m.commend == 0"
closable closable
style="margin:2px" style="margin:2px"
@ -126,27 +127,6 @@
> >
<i class="el-icon-download">导出</i> <i class="el-icon-download">导出</i>
</a> </a>
<a
href="javascript:void(0)"
@click="requestFullScreen"
class="svg-a-sm"
>
<i class="el-icon-monitor">全屏</i>
</a>
<a
href="javascript:void(0)"
@click="help"
class="svg-a-sm"
>
<i class="el-icon-info">帮助</i>
</a>
<a
href="javascript:void(0)"
@click="wanted"
class="svg-a-sm"
>
<i class="el-icon-question">反馈</i>
</a>
</div> </div>
</div> </div>
<!-- 头部over --> <!-- 头部over -->
@ -1672,6 +1652,18 @@ export default {
_this.createSingleNode(event.offsetX, event.offsetY); _this.createSingleNode(event.offsetX, event.offsetY);
} }
event.preventDefault(); event.preventDefault();
},
deleteGraph(item) {
this.$confirm("确定要删除该图谱吗?", "提示", {
confirmButtonText: "确定",
cancelButtonText: "取消",
type: "warning"
}).then(() => {
const index = this.pageModel.nodeList.indexOf(item);
if (index > -1) {
this.pageModel.nodeList.splice(index, 1);
}
});
} }
} }
}; };

View File

@ -12,23 +12,43 @@
<div class="ml-m"> <div class="ml-m">
<div class="guanzhu" style="padding: 20px;"> <div class="guanzhu" style="padding: 20px;">
<h2 class="hometitle ml-ht">图谱列表</h2> <h2 class="hometitle ml-ht">图谱列表</h2>
<el-input
v-model="graphSearchQuery"
placeholder="搜索图谱"
clearable
style="margin-bottom: 10px"
></el-input>
<div class="ml-a-box" style="min-height:280px"> <div class="ml-a-box" style="min-height:280px">
<el-tag <el-tag
class="tag-ml-5" class="tag-ml-5"
@click="createDomain" @click="createDomain"
>新建图谱</el-tag >新建图谱</el-tag
> >
<el-tag <template v-for="(m, index) in filteredNodeList">
@click="matchDomainGraph(m)" <el-tag
v-for="(m, index) in pageModel.nodeList" v-if="m.id !== editingDomainId"
:key="index" :key="`tag-${index}`"
:type="m.type" @click="matchDomainGraph(m)"
effect="dark" @contextmenu.native.prevent="deleteGraph(m)"
:title="m.name" @dblclick.native="handleDblClick(m)"
class="tag-ml-5" :type="m.type"
> effect="dark"
{{ m.name }} :title="m.name"
</el-tag> class="tag-ml-5"
>
{{ m.name }}
</el-tag>
<el-input
v-else
:key="`input-${index}`"
:ref="`editInput-${m.id}`"
v-model="m.name"
@blur="handleInputBlur(m)"
@keyup.enter.native="handleInputEnter(m)"
size="mini"
class="tag-ml-5"
></el-input>
</template>
</div> </div>
<div class="fr"> <div class="fr">
<a <a
@ -66,16 +86,21 @@
</span> </span>
<div v-show="domain != ''" class="fl" style="display: flex"> <div v-show="domain != ''" class="fl" style="display: flex">
<div class="search"> <div class="search">
<el-button @click="getDomainGraph(0)"> <el-button @click="intelligentSearch">
<svg class="icon" aria-hidden="true"> <svg class="icon" aria-hidden="true">
<use xlink:href="#icon-search"></use> <use xlink:href="#icon-search"></use>
</svg> </svg>
</el-button> </el-button>
<el-input <el-autocomplete
placeholder="请输入关键词" class="inline-input"
v-model="nodeName" v-model="nodeName"
@keyup.enter.native="getDomainGraph" :fetch-suggestions="querySearch"
></el-input> placeholder="请输入关键词"
@select="handleSelect"
@keyup.enter.native="intelligentSearch"
:trigger-on-focus="false"
clearable
></el-autocomplete>
</div> </div>
<span> <span>
<span class="dibmr"> <span class="dibmr">
@ -92,6 +117,9 @@
</span> </span>
</div> </div>
<div class="fr"> <div class="fr">
<a href="javascript:void(0)" @click="toggleAnimation" class="svg-a-sm" :style="{color: isAnimating ? '#409EFF' : ''}">
<i :class="isAnimating ? 'el-icon-video-pause' : 'el-icon-video-play'">{{ isAnimating ? '停止旋转' : '旋转动画' }}</i>
</a>
<a href="javascript:void(0)" @click="showJsonData" class="svg-a-sm"> <a href="javascript:void(0)" @click="showJsonData" class="svg-a-sm">
<i class="el-icon-tickets">查看数据</i> <i class="el-icon-tickets">查看数据</i>
</a> </a>
@ -105,19 +133,7 @@
<a href="javascript:void(0)" @click="exportGraph" class="svg-a-sm"> <a href="javascript:void(0)" @click="exportGraph" class="svg-a-sm">
<i class="el-icon-download">导出</i> <i class="el-icon-download">导出</i>
</a> </a>
<a
href="javascript:void(0)"
@click="requestFullScreen"
class="svg-a-sm"
>
<i class="el-icon-monitor">全屏</i>
</a>
<a href="javascript:void(0)" @click="help" class="svg-a-sm">
<i class="el-icon-info">帮助</i>
</a>
<a href="javascript:void(0)" @click="wanted" class="svg-a-sm">
<i class="el-icon-question">反馈</i>
</a>
</div> </div>
</div> </div>
<!-- 头部over --> <!-- 头部over -->
@ -505,9 +521,24 @@ export default {
links: [] links: []
}, },
jsonShow: false, jsonShow: false,
helpShow: false helpShow: false,
graphSearchQuery: "",
originalName: "",
editingDomainId: null,
isAnimating: false
}; };
}, },
computed: {
filteredNodeList() {
if (this.graphSearchQuery) {
return this.pageModel.nodeList.filter(item => {
return item.name.toLowerCase().includes(this.graphSearchQuery.toLowerCase());
});
} else {
return this.pageModel.nodeList;
}
}
},
filters: { filters: {
labelFormat: function(value) { labelFormat: function(value) {
let domain = value.substring(1, value.length - 1); let domain = value.substring(1, value.length - 1);
@ -532,6 +563,142 @@ export default {
}); });
}, },
methods: { methods: {
//
querySearch(queryString, cb) {
if (!queryString || queryString.trim().length === 0) {
cb([]);
return;
}
if (!this.domain) {
cb([]);
return;
}
//
kgBuilderApi.getSearchSuggestions(queryString, this.domain).then(result => {
if (result.code === 200 && result.data) {
// el-autocomplete
const suggestions = result.data.map(item => {
return { value: item };
});
cb(suggestions);
} else {
cb([]);
}
}).catch(() => {
cb([]);
});
},
//
handleSelect(item) {
this.nodeName = item.value;
this.intelligentSearch();
},
//
toggleAnimation() {
const kgbuilder = this.$refs.kg_builder;
if (kgbuilder) {
this.isAnimating = kgbuilder.toggleOrbitAnimation();
if (this.isAnimating) {
this.$message.success('旋转动画已启动');
} else {
this.$message.info('旋转动画已停止');
}
}
},
intelligentSearch() {
if (!this.nodeName) {
this.$message.warning("请输入搜索内容");
return;
}
const data = {
text: this.nodeName,
domain: this.domain
};
kgBuilderApi.intelligentSearch(data).then(result => {
if (result.code === 200) {
const graphData = result.data;
if (graphData.nodes && graphData.nodes.length > 0) {
this.graphData = graphData;
} else {
this.$message.info("未找到相关节点");
}
} else {
this.$message.error("智能搜索失败: " + result.msg);
}
}).catch(error => {
this.$message.error("智能搜索异常: " + error);
});
},
deleteGraph(item) {
this.$confirm(`此操作将永久删除图谱 [${item.name}] 及所有数据, 是否继续?`, "提示", {
confirmButtonText: "确定",
cancelButtonText: "取消",
type: "warning"
}).then(() => {
let data = { domainId: item.id };
kgBuilderApi.deleteDomain(data).then(result => {
if (result.code == 200) {
this.$message({
type: "success",
message: "删除成功!"
});
this.getDomain();
} else {
this.$message.error("删除失败!");
}
});
}).catch(() => {
this.$message({
type: "info",
message: "已取消删除"
});
});
},
handleDblClick(item) {
this.editingDomainId = item.id;
this.originalName = item.name;
this.$nextTick(() => {
this.$refs[`editInput-${item.id}`][0].focus();
});
},
handleInputBlur(item) {
this.updateDomainName(item);
},
handleInputEnter(item) {
this.updateDomainName(item);
},
updateDomainName(item) {
if (item.name === this.originalName) {
this.editingDomainId = null;
return;
}
const data = {
oldName: this.originalName,
newName: item.name
};
kgBuilderApi.renameDomain(data).then(result => {
if (result.code === 200) {
this.$message({
type: "success",
message: "重命名成功!"
});
this.getDomain();
} else {
this.$message.error("重命名失败!");
item.name = this.originalName; //
}
this.editingDomainId = null;
}).catch(() => {
this.$message.error("重命名失败!");
item.name = this.originalName; //
this.editingDomainId = null;
});
},
_thisKey(item) { _thisKey(item) {
this._thisView = item; this._thisView = item;
}, },
@ -877,8 +1044,10 @@ export default {
if (result.code == 200) { if (result.code == 200) {
if (result.data != null) { if (result.data != null) {
_this.graphData = { nodes: [], links: [] }; _this.graphData = { nodes: [], links: [] };
_this.graphData.nodes = result.data.node; if (result.data) {
_this.graphData.links = result.data.relationship; _this.graphData.nodes = result.data.node;
_this.graphData.links = result.data.relationship;
}
} }
} }
}); });
@ -1455,6 +1624,20 @@ ul {
border: none; border: none;
transition: background 0.3s; transition: background 0.3s;
} }
.search .el-autocomplete {
width: 100%;
}
.search .el-autocomplete .el-input__inner {
box-sizing: border-box;
padding-left: 15px;
height: 32px;
line-height: 32px;
padding-right: 40px;
background: transparent;
border-radius: 32px;
border: none;
transition: background 0.3s;
}
.search .el-button--default { .search .el-button--default {
position: absolute; position: absolute;
right: 1px; right: 1px;

32
pom.xml
View File

@ -27,12 +27,21 @@
<qiniu-sdk.version>7.2.17</qiniu-sdk.version> <qiniu-sdk.version>7.2.17</qiniu-sdk.version>
<poi.version>3.17</poi.version> <poi.version>3.17</poi.version>
<javacsv.version>2.0</javacsv.version> <javacsv.version>2.0</javacsv.version>
<hanlp.version>portable-1.7.8</hanlp.version> <hanlp.version>1.8.3</hanlp.version>
<lombok.version>1.18.26</lombok.version> <lombok.version>1.18.26</lombok.version>
<hutool.version>5.8.18</hutool.version> <hutool.version>5.8.18</hutool.version>
<commons-fileupload.version>1.4</commons-fileupload.version> <commons-fileupload.version>1.4</commons-fileupload.version>
<fasterxml.jackson.version>2.15.0</fasterxml.jackson.version> <fasterxml.jackson.version>2.15.0</fasterxml.jackson.version>
</properties> </properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.hankcs</groupId>
<artifactId>hanlp</artifactId>
<version>portable-${hanlp.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies> <dependencies>
<dependency> <dependency>
@ -47,4 +56,25 @@
<module>kgBuilder-meta</module> <module>kgBuilder-meta</module>
<module>kgBuilder-pro</module> <module>kgBuilder-pro</module>
</modules> </modules>
<repositories>
<repository>
<id>aliyunmaven</id>
<name>aliyun maven</name>
<url>https://maven.aliyun.com/repository/public</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<id>aliyun-plugin</id>
<url>https://maven.aliyun.com/repository/public</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</pluginRepository>
</pluginRepositories>
</project> </project>