瀏覽代碼

Merge remote-tracking branch 'origin/push-test' into push-test

# Conflicts:
#	graph-web/src/main/java/org/diagbot/graphWeb/work/GraphCalculate.java
MarkHuang 5 年之前
父節點
當前提交
1656f5b79d
共有 100 個文件被更改,包括 5673 次插入和 2259 次删除
  1. 130 0
      algorithm/src/main/java/org/algorithm/core/FilterRule.java
  2. 282 0
      algorithm/src/main/java/org/algorithm/core/RelationTreeUtils.java
  3. 493 0
      algorithm/src/main/java/org/algorithm/core/RuleCheckMachine.java
  4. 3 4
      algorithm/src/main/java/org/algorithm/core/cnn/AlgorithmCNNExecutor.java
  5. 22 0
      algorithm/src/main/java/org/algorithm/core/cnn/AlgorithmCNNExecutorPacs.java
  6. 1 1
      algorithm/src/main/java/org/algorithm/core/cnn/dataset/RelationExtractionDataSet.java
  7. 32 1
      algorithm/src/main/java/org/algorithm/core/cnn/entity/Lemma.java
  8. 40 13
      algorithm/src/main/java/org/algorithm/core/cnn/model/RelationExtractionEnsembleModel.java
  9. 4 4
      algorithm/src/main/java/org/algorithm/core/cnn/model/RelationExtractionModel.java
  10. 2 1
      algorithm/src/main/java/org/algorithm/core/neural/DiagnosisPredictExecutor.java
  11. 38 17
      algorithm/src/main/java/org/algorithm/core/neural/TensorFlowModelLoadFactory.java
  12. 104 36
      algorithm/src/main/java/org/algorithm/core/neural/TensorflowModel.java
  13. 308 13
      algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSet.java
  14. 185 74
      algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSetImpl.java
  15. 26 1
      algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSetImplNonParallel.java
  16. 33 0
      algorithm/src/main/java/org/algorithm/factory/RelationExtractionFactory.java
  17. 5 3
      algorithm/src/main/java/org/algorithm/test/ReEnsembleModelTest.java
  18. 9 4
      algorithm/src/main/java/org/algorithm/test/TensorflowExcutorTest.java
  19. 24 37
      algorithm/src/main/java/org/algorithm/test/Test.java
  20. 46 0
      algorithm/src/main/java/org/algorithm/test/TestDiagnosisFilter.java
  21. 34 0
      algorithm/src/main/java/org/algorithm/test/TestReSplit.java
  22. 15 0
      algorithm/src/main/java/org/algorithm/test/TestRelationTreeUtils.java
  23. 140 0
      algorithm/src/main/java/org/algorithm/test/TestRuleCheckMachine.java
  24. 1 1
      algorithm/src/main/java/org/algorithm/util/MysqlConnector.java
  25. 1 1
      algorithm/src/main/resources/algorithm.properties
  26. 12 0
      bigdata-web/pom.xml
  27. 0 285
      bigdata-web/src/main/java/org/diagbot/bigdata/common/ApplicationCacheUtil.java
  28. 0 98
      bigdata-web/src/main/java/org/diagbot/bigdata/common/InitListener.java
  29. 2 2
      bigdata-web/src/main/java/org/diagbot/bigdata/controller/AlgorithmController.java
  30. 0 22
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingDiagMapper.java
  31. 0 22
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingFilterMapper.java
  32. 0 22
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingVitalMapper.java
  33. 0 38
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingDiag.java
  34. 0 38
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingVital.java
  35. 0 12
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingDiagWrapper.java
  36. 0 6
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingFilterWrapper.java
  37. 0 7
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingVitalWrapper.java
  38. 0 55
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingDiagMapper.xml
  39. 0 67
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingFilterMapper.xml
  40. 0 77
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingVitalMapper.xml
  41. 0 8
      bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingDiagService.java
  42. 0 8
      bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingFilterService.java
  43. 0 8
      bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingVitalService.java
  44. 0 21
      bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingDiagServiceImpl.java
  45. 0 21
      bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingFilterServiceImpl.java
  46. 0 21
      bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingVitalServiceImpl.java
  47. 29 13
      bigdata-web/src/main/java/org/diagbot/bigdata/work/AlgorithmCore.java
  48. 95 0
      bigdata-web/src/main/java/org/diagbot/bigdata/work/BigDataParamsProxy.java
  49. 1 1
      bigdata-web/src/main/java/org/diagbot/bigdata/work/BigDataSearchData.java
  50. 0 702
      bigdata-web/src/main/java/org/diagbot/bigdata/work/ParamsDataProxy.java
  51. 56 84
      bigdata-web/src/main/java/org/diagbot/bigdata/work/ResultDataProxy.java
  52. 37 41
      bigdata-web/src/test/java/org/diagbot/AddStandWordTest.java
  53. 135 0
      bigdata-web/src/test/java/org/diagbot/EyeHospitalData.java
  54. 231 0
      bigdata-web/src/test/java/org/diagbot/Rule2AppTest.java
  55. 191 0
      bigdata-web/src/test/java/org/diagbot/RuleTest.java
  56. 2 2
      common-push/pom.xml
  57. 0 11
      common-push/src/main/java/org/diagbot/common/push/Test.java
  58. 43 0
      common-push/src/main/java/org/diagbot/common/push/bean/CrisisDetail.java
  59. 1 1
      common-service/src/main/java/org/diagbot/common/work/FeatureRate.java
  60. 1 1
      common-service/src/main/java/org/diagbot/common/work/LisDetail.java
  61. 63 0
      common-push/src/main/java/org/diagbot/common/push/bean/RelevantFeature.java
  62. 41 12
      common-service/src/main/java/org/diagbot/common/work/ResponseData.java
  63. 1 1
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingFilter.java
  64. 163 0
      common-push/src/main/java/org/diagbot/common/push/bean/Rule.java
  65. 45 0
      common-push/src/main/java/org/diagbot/common/push/bean/RuleApp.java
  66. 509 0
      common-push/src/main/java/org/diagbot/common/push/bean/SearchData.java
  67. 1 1
      common-service/src/main/java/org/diagbot/common/javabean/Detail.java
  68. 1 1
      common-service/src/main/java/org/diagbot/common/javabean/Drugs.java
  69. 54 0
      common-push/src/main/java/org/diagbot/common/push/bean/neo4j/Filnlly.java
  70. 1 1
      common-service/src/main/java/org/diagbot/common/javabean/FuzhenFilnlly.java
  71. 1 1
      common-service/src/main/java/org/diagbot/common/javabean/Indicators.java
  72. 1 1
      common-service/src/main/java/org/diagbot/common/javabean/MangementEvaluation.java
  73. 10 1
      common-service/src/main/java/org/diagbot/common/javabean/MedicalIndication.java
  74. 1 1
      common-service/src/main/java/org/diagbot/common/javabean/MedicalIndicationDetail.java
  75. 1 1
      common-service/src/main/java/org/diagbot/common/javabean/Medicition.java
  76. 24 0
      common-push/src/main/java/org/diagbot/common/push/bean/neo4j/MeditionDetail.java
  77. 27 0
      common-push/src/main/java/org/diagbot/common/push/bean/neo4j/Treat.java
  78. 24 0
      common-push/src/main/java/org/diagbot/common/push/bean/neo4j/TreatCate.java
  79. 24 0
      common-push/src/main/java/org/diagbot/common/push/bean/neo4j/TreatDetail.java
  80. 396 0
      common-push/src/main/java/org/diagbot/common/push/cache/ApplicationCacheUtil.java
  81. 188 60
      common-push/src/main/java/org/diagbot/common/push/cache/CacheFileManager.java
  82. 1 1
      graph/src/main/java/org/diagbot/graph/util/CacheUtil.java
  83. 106 43
      common-push/src/main/java/org/diagbot/common/push/filter/ClassifyDiag.java
  84. 0 79
      common-push/src/main/java/org/diagbot/common/push/filter/PreResult.java
  85. 0 45
      common-push/src/main/java/org/diagbot/common/push/filter/PretreatmentFilter.java
  86. 0 41
      common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentLis.java
  87. 0 17
      common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentSymptom.java
  88. 0 17
      common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentVital.java
  89. 80 0
      common-push/src/main/java/org/diagbot/common/push/filter/rule/CalcFormula.java
  90. 335 0
      common-push/src/main/java/org/diagbot/common/push/filter/rule/PretreatmentRule.java
  91. 30 0
      common-push/src/main/java/org/diagbot/common/push/naivebayes/NaiveBayesTest.java
  92. 92 0
      common-push/src/main/java/org/diagbot/common/push/naivebayes/core/AlgorithmNaiveBayesExecutor.java
  93. 34 0
      common-push/src/main/java/org/diagbot/common/push/naivebayes/factory/AlgorithmNaiveBayesFactory.java
  94. 101 0
      common-push/src/main/java/org/diagbot/common/push/util/CryptUtil.java
  95. 88 0
      common-push/src/main/java/org/diagbot/common/push/util/ListUtil.java
  96. 33 2
      bigdata-web/src/main/java/org/diagbot/bigdata/util/BigDataConstants.java
  97. 288 0
      common-push/src/main/java/org/diagbot/common/push/work/ParamsDataProxy.java
  98. 95 0
      common-push/src/main/java/org/diagbot/common/push/work/RelationExtractionUtil.java
  99. 0 28
      common-service/src/main/java/org/diagbot/common/javabean/Filnlly.java
  100. 0 0
      common-service/src/main/java/org/diagbot/common/javabean/Rule.java

+ 130 - 0
algorithm/src/main/java/org/algorithm/core/FilterRule.java

@@ -0,0 +1,130 @@
+package org.algorithm.core;
+
+import java.util.Map;
+
/**
 * One relation-filter rule, i.e. a single expanded row of the
 * relation_neg_rules table (one instance per (despite, despite_inside) pair).
 *
 * key_1/type_1 and key_2/type_2 describe the two related entities,
 * inside/insideType describes a token between them, and despite/despiteInside
 * hold exception words that disable the rule.
 *
 * @Author: bijl
 * @Date: 2019/9/5 20:21
 */
public class FilterRule {

    private Integer uuid;           // sequential rule id, assigned by the loader

    private String key_1;           // entity-1 match text
    private String type_1;          // entity-1 match kind: "", "type" or "word"

    private String key_2;           // entity-2 match text
    private String type_2;          // entity-2 match kind: "", "type" or "word"

    private String inside;          // in-between match text
    private String insideType;      // in-between match kind, e.g. "punc"

    private String despite;         // entity-name exception word
    private String despiteInside;   // in-between exception word

    /**
     * Builds a rule from a column-name to value map; absent keys leave the
     * corresponding field null. The uuid is not read from the map — callers
     * assign it afterwards via {@link #setUuid(Integer)}.
     *
     * @param aMap rule columns keyed by database column name
     */
    public FilterRule(Map<String, String> aMap) {
        key_1 = aMap.get("key_1");
        type_1 = aMap.get("type_1");
        key_2 = aMap.get("key_2");
        type_2 = aMap.get("type_2");
        inside = aMap.get("inside");
        insideType = aMap.get("inside_type");
        despite = aMap.get("despite");
        despiteInside = aMap.get("despite_inside");
    }

    public Integer getUuid() { return uuid; }

    public void setUuid(Integer uuid) { this.uuid = uuid; }

    public String getKey_1() { return key_1; }

    public void setKey_1(String key_1) { this.key_1 = key_1; }

    public String getType_1() { return type_1; }

    public void setType_1(String type_1) { this.type_1 = type_1; }

    public String getKey_2() { return key_2; }

    public void setKey_2(String key_2) { this.key_2 = key_2; }

    public String getType_2() { return type_2; }

    public void setType_2(String type_2) { this.type_2 = type_2; }

    public String getInside() { return inside; }

    public void setInside(String inside) { this.inside = inside; }

    public String getInsideType() { return insideType; }

    public void setInsideType(String insideType) { this.insideType = insideType; }

    public String getDespite() { return despite; }

    public void setDespite(String despite) { this.despite = despite; }

    public String getDespiteInside() { return despiteInside; }

    public void setDespiteInside(String despiteInside) { this.despiteInside = despiteInside; }

    /** Debug representation listing every field. */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("FilterRule{");
        sb.append("uuid=").append(uuid);
        sb.append(", key_1='").append(key_1).append('\'');
        sb.append(", type_1='").append(type_1).append('\'');
        sb.append(", key_2='").append(key_2).append('\'');
        sb.append(", type_2='").append(type_2).append('\'');
        sb.append(", inside='").append(inside).append('\'');
        sb.append(", insideType='").append(insideType).append('\'');
        sb.append(", despite='").append(despite).append('\'');
        sb.append(", despiteInside='").append(despiteInside).append('\'');
        sb.append('}');
        return sb.toString();
    }
}

+ 282 - 0
algorithm/src/main/java/org/algorithm/core/RelationTreeUtils.java

@@ -0,0 +1,282 @@
+package org.algorithm.core;
+
+import org.algorithm.core.cnn.entity.Lemma;
+import org.algorithm.core.cnn.entity.Triad;
+
+import java.util.*;
+
+/**
+ * 关系树工具类
+ *
+ * @Author: bijl
+ * @Date: 2019/9/5 15:16
+ * @Description:
+ */
+public class RelationTreeUtils {
+
+    /**
+     * 同名实体(这里也叫词项)归并
+     * 规则:
+     * 1- 直接替代为位置最前面的一个
+     *
+     * @param triads 实体对列表
+     */
+    public static void sameTextLemmaMerge(List<Triad> triads) {
+
+        Map<String, Lemma> lemmaMap = new HashMap<>();
+        for (Triad triad : triads) {
+            Lemma l1 = triad.getL_1();
+            Lemma l2 = triad.getL_2();
+
+            if (lemmaMap.get(l1.getText()) == null)
+                lemmaMap.put(l1.getText(), l1);
+            else {
+                Lemma l1Pre = lemmaMap.get(l1.getText());
+                if (l1Pre.getStartPosition() > l1.getStartPosition())
+                    triad.setL_1(l1);  // 取靠前的
+            }
+
+            if (lemmaMap.get(l2.getText()) == null)
+                lemmaMap.put(l2.getText(), l2);
+            else {
+                Lemma l2Pre = lemmaMap.get(l2.getText());
+                if (l2Pre.getStartPosition() > l2.getStartPosition())
+                    triad.setL_2(l2);  // 取靠前的
+            }
+        }
+        for (Triad triad : triads) {
+            Lemma l1 = triad.getL_1();
+            Lemma l2 = triad.getL_2();
+            triad.setL_1(lemmaMap.get(l1.getText()));  // 用前面的同名实体(这里也叫词项)替代后面的
+            triad.setL_2(lemmaMap.get(l2.getText()));  // 用前面的同名实体(这里也叫词项)替代后面的
+        }
+    }
+
+    /**
+     * 构建关系树
+     * 基本规则:
+     * 1- 两个有关系的实体,前面的为父节点,后面的为子节点
+     *
+     * @param triads 有关系的三元组列表
+     */
+    public static void buildRelationTree(List<Triad> triads) {
+        for (Triad triad : triads) {
+            Lemma l1 = triad.getL_1();
+            Lemma l2 = triad.getL_2();
+            if (l1.getStartPosition() < l2.getStartPosition()) {  // 在前者为父节点
+                l1.setHaveChildren(true);
+                l2.setParent(l1);
+            } else {
+                l2.setHaveChildren(true);
+                l1.setParent(l2);
+            }
+        }
+    }
+
+    /**
+     * 获取关系树的分枝
+     *
+     * @param triads 有关系,并且设置了父子节点关系的三元组
+     */
+    public static List<List<String>> getRelationTreeBranches(List<Triad> triads) {
+        Map<Lemma, Integer> leafNodeLemmas = new HashMap<>();
+
+        for (Triad triad : triads) {
+            if (!triad.getL_1().isHaveChildren())
+                leafNodeLemmas.putIfAbsent(triad.getL_1(), 1);
+
+            if (!triad.getL_2().isHaveChildren())
+                leafNodeLemmas.putIfAbsent(triad.getL_2(), 1);
+        }
+
+        List<List<String>> branches = new ArrayList<>();
+        for (Lemma lemma : leafNodeLemmas.keySet()) {
+            List<Lemma> aBranch = new ArrayList<>();
+            while (lemma != null) {
+                aBranch.add(lemma);
+                lemma = lemma.getParent();
+            }
+            aBranch.sort(Comparator.naturalOrder());  // 按位置排序
+            branches.addAll(handleBranch(aBranch));
+        }
+
+
+        return branches;
+    }
+
+    /**
+     * 处理分枝,要求组合非阴性词,阴性词必须包含
+     * 操作:
+     * 1- 分离阴性词和非阴性词
+     * 2- 组合非阴性词
+     * 3- 添加阴性词到组合结果中
+     *
+     * @param aBranch
+     * @return
+     */
+    private static List<List<String>> handleBranch(List<Lemma> aBranch) {
+        List<Lemma> nonNegativeLemmas = new ArrayList<>();
+        List<Lemma> negativeLemmas = new ArrayList<>();
+        for (Lemma lemma : aBranch) {
+            if ("反意或虚拟".equals(lemma.getProperty()))
+                negativeLemmas.add(lemma);
+            else
+                nonNegativeLemmas.add(lemma);
+        }
+        List<List<Lemma>> nonNegativeLemmaCombinations = new ArrayList<>();
+        if (nonNegativeLemmas.size() > 0) {
+            for (int i = 1; i <= nonNegativeLemmas.size(); i++) {
+                combinerSelect(nonNegativeLemmas, new ArrayList<>(), nonNegativeLemmaCombinations,
+                        nonNegativeLemmas.size(), i);
+            }
+        }
+        List<List<String>> result = new ArrayList<>();
+        for (List<Lemma> lemmaCombination : nonNegativeLemmaCombinations) {
+            List<String> lemmaNames = new ArrayList<>();
+            lemmaCombination.addAll(negativeLemmas);  // 阴性词加入到组合中
+            lemmaCombination.sort(Comparator.naturalOrder());  // 按位置排序
+            for (Lemma lemma : lemmaCombination)  // 取出名称
+                lemmaNames.add(lemma.getText());
+            if (lemmaNames.size() >= 2)
+                result.add(lemmaNames);
+        }
+
+        return result;
+
+    }
+
+    /**
+     * 从三元组列表到关系树分枝
+     *
+     * @param triads
+     * @return
+     */
+    public static List<List<String>> triadsToRelationTreeBranches(List<Triad> triads) {
+//        sameTextLemmaMerge(triads);
+        buildRelationTree(triads);
+        return getRelationTreeBranches(triads);
+    }
+
+    /**
+     * 组合生成器
+     *
+     * @param data      原始数据
+     * @param workSpace 自定义一个临时空间,用来存储每次符合条件的值
+     * @param k         C(n,k)中的k
+     */
+    private static <E> void combinerSelect(List<E> data, List<E> workSpace, List<List<E>> result, int n, int k) {
+        List<E> copyData;
+        List<E> copyWorkSpace = null;
+
+        if (workSpace.size() == k) {
+//            for (E c : workSpace)
+//                System.out.print(c);
+
+            result.add(new ArrayList<>(workSpace));
+//            System.out.println();
+        }
+
+        for (int i = 0; i < data.size(); i++) {
+            copyData = new ArrayList<E>(data);
+            copyWorkSpace = new ArrayList<E>(workSpace);
+
+            copyWorkSpace.add(copyData.get(i));
+            for (int j = i; j >= 0; j--)
+                copyData.remove(j);
+            combinerSelect(copyData, copyWorkSpace, result, n, k);
+        }
+    }
+
+    /**
+     * 全排列算法
+     *
+     * @param stringList 字符串列表
+     * @return
+     */
+    public static ArrayList<ArrayList<String>> permute(List<String> stringList) {
+        ArrayList<ArrayList<String>> result = new ArrayList<ArrayList<String>>();
+        result.add(new ArrayList<String>());
+
+        for (int i = 0; i < stringList.size(); i++) {
+            //list of list in current iteration of the stringList num
+            ArrayList<ArrayList<String>> current = new ArrayList<ArrayList<String>>();
+
+            for (ArrayList<String> l : result) {
+                // # of locations to insert is largest index + 1
+                for (int j = 0; j < l.size() + 1; j++) {
+                    // + add num[i] to different locations
+                    l.add(j, stringList.get(i));
+
+                    ArrayList<String> temp = new ArrayList<String>(l);
+                    current.add(temp);
+
+                    // - remove num[i] add
+                    l.remove(j);
+                }
+            }
+
+            result = new ArrayList<>(current);
+        }
+
+        return result;
+    }
+
+
+    /**
+     * 测试文件
+     */
+    public static void test() {
+
+        List<Triad> triads = new ArrayList<>();
+        String[] arr_1 = {"子宫", "0,1", "部位"};
+        String[] arr_2 = {"内膜", "2,3", "结构"};
+        addTriad(arr_1, arr_2, triads);
+
+        String[] arr_1_1 = {"不", "13,13", "反意或虚拟"};
+        String[] arr_2_1 = {"出血", "10,11", "形容词"};
+        addTriad(arr_1_1, arr_2_1, triads);
+
+        String[] arr_1_2 = {"胸部", "15,16", "部位"};
+        String[] arr_2_2 = {"剧烈", "17,18", "程度"};
+        addTriad(arr_1_2, arr_2_2, triads);
+
+        String[] arr_1_3 = {"疼痛", "17,18", "形容词"};
+        String[] arr_2_3 = {"剧烈", "19,20", "程度"};
+        addTriad(arr_1_3, arr_2_3, triads);
+
+        String[] arr_1_4 = {"内膜", "2,3", "结构"};
+        String[] arr_2_4 = {"出血", "10,11", "形容词"};
+        addTriad(arr_1_4, arr_2_4, triads);
+
+        System.out.println(triads.size());
+        sameTextLemmaMerge(triads);
+        buildRelationTree(triads);
+        List<List<String>> info = getRelationTreeBranches(triads);
+
+        System.out.println(info);
+    }
+
+    /**
+     * 增加三元组
+     */
+    private static void addTriad(String[] lemma_1, String[] lemma_2, List<Triad> triads) {
+        Lemma lemma1 = new Lemma();
+        lemma1.setText(lemma_1[0]);
+        lemma1.setPosition(lemma_1[1]);
+        lemma1.setProperty(lemma_1[2]);
+
+        Lemma lemma2 = new Lemma();
+        lemma2.setText(lemma_2[0]);
+        lemma2.setPosition(lemma_2[1]);
+        lemma2.setProperty(lemma_2[2]);
+
+        Triad triad = new Triad();
+        triad.setL_1(lemma1);
+        triad.setL_2(lemma2);
+
+        triads.add(triad);
+
+    }
+
+
+}

+ 493 - 0
algorithm/src/main/java/org/algorithm/core/RuleCheckMachine.java

@@ -0,0 +1,493 @@
+package org.algorithm.core;
+
+import org.algorithm.core.cnn.entity.Lemma;
+import org.algorithm.core.cnn.entity.Triad;
+import org.algorithm.util.MysqlConnector;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.*;
+
+/**
+ * 规则检查机器
+ *
+ * @Author: bijl
+ * @Date: 2019/9/6 10:32
+ * @Description:
+ */
+public class RuleCheckMachine {
+    private final List<FilterRule> filterRules = new ArrayList<>();
+    private Map<String, Map<String, Set<Integer>>> key_1_map = null;
+    private Map<String, Map<String, Set<Integer>>> key_2_map = null;
+    private Map<String, String> punctuations = new HashMap<>();
+    private Map<String, Set<Integer>> despiteMap = null;  // 实体名:[规则uuid列表]
+    private Map<String, Set<Integer>> despiteInsideMap = null; // 实体名:[规则uuid列表]
+    private Map<String, Map<String, Set<Integer>>> insideMap = null;
+
+
    /**
     * Loads the rules from the database and builds every lookup index.
     * Order matters: loadRules() must run first, because every make*Map()
     * method iterates the filterRules list it populates.
     */
    public RuleCheckMachine() {
        this.loadRules();
        this.makeKey1Map();
        this.makeKey2Map();
        this.makeInsideMap();
        this.makeDespiteMap();
        this.makeDespiteInsideMap();
    }
+
+
    /**
     * Loads filter rules from table relation_neg_rules (rows with status = 1).
     *
     * Each row is fanned out over its comma-separated "despite" and
     * "despite_inside" columns: one FilterRule is created per
     * (despite, despite_inside) pair, each receiving a sequential uuid that
     * the lookup maps later use as the rule id.
     *
     * NOTE(review): assumes despite/despite_inside are never SQL NULL —
     * split() would throw NPE; confirm the table schema.
     *
     * @throws RuntimeException if reading the result set fails
     */
    public void loadRules() {
        /**
         * Connect to the rule database.
         * NOTE(review): the JDBC URL embeds host and credentials in source
         * code — these should live in external configuration.
         */
        String url = "jdbc:mysql://192.168.2.235/test_case?user=root&password=diagbot@20180822";
        MysqlConnector connector = new MysqlConnector(url);
        String querySql =
                "SELECT rr.key_1, rr.type_1, rr.key_2, rr.type_2, rr.inside, rr.inside_type, " +
                        "rr.despite, rr.despite_inside " +
                        "FROM relation_neg_rules AS rr " +
                        "WHERE rr.`status` = 1";

        ResultSet rs = connector.query(querySql);
        Integer uuid = 0;  // sequential rule id, shared across all expanded pairs
        try {
            while (rs.next()) {
                String key_1 = rs.getString("key_1");
                String type_1 = rs.getString("type_1");

                String key_2 = rs.getString("key_2");
                String type_2 = rs.getString("type_2");

                String inside = rs.getString("inside");
                String inside_type = rs.getString("inside_type");

                String despite = rs.getString("despite");
                String despite_inside = rs.getString("despite_inside");

                // Cartesian expansion: one FilterRule per (despite, despite_inside) pair.
                String[] despiteSplit = despite.split(",");
                String[] despiteInsideSplit = despite_inside.split(",");
                for (int j = 0; j < despiteSplit.length; j++) {
                    for (int k = 0; k < despiteInsideSplit.length; k++) {
                        Map<String, String> variableMap = new HashMap<>();
                        variableMap.put("key_1", key_1);
                        variableMap.put("type_1", type_1);

                        variableMap.put("key_2", key_2);
                        variableMap.put("type_2", type_2);

                        variableMap.put("inside", inside);
                        variableMap.put("inside_type", inside_type);

                        variableMap.put("despite", despiteSplit[j]);
                        variableMap.put("despite_inside", despiteInsideSplit[k]);

                        FilterRule filterRule = new FilterRule(variableMap);
                        filterRule.setUuid(uuid);
                        this.filterRules.add(filterRule);

//                            System.out.println(filterRule);

                        uuid += 1;
                    }
                }

            }

        } catch (SQLException e) {
            e.printStackTrace();
            throw new RuntimeException("加载规则字典失败");
        } finally {
            connector.close();
        }
    }
+
+    /**
+     * 制作实体1相关信息字典
+     */
+    private void makeKey1Map() {
+        Map<String, Map<String, Set<Integer>>> key_1_map_ = new HashMap<>();
+        Map<String, Set<Integer>> emptyMap = new HashMap<>();
+        Map<String, Set<Integer>> typeMap = new HashMap<>();
+        Map<String, Set<Integer>> wordMap = new HashMap<>();
+        key_1_map_.put("", emptyMap);
+        key_1_map_.put("type", typeMap);
+        key_1_map_.put("word", wordMap);
+
+        for (FilterRule rule : this.filterRules) {
+            String key_1 = rule.getKey_1();
+            String type_1 = rule.getType_1();
+            Integer uuid = rule.getUuid();
+
+            this.inputMaps(key_1, type_1, uuid, emptyMap, typeMap, wordMap, null);
+        }
+        this.key_1_map = key_1_map_;
+    }
+
+
+    /**
+     * 制作实体2相关信息字典
+     */
+    private void makeKey2Map() {
+        Map<String, Map<String, Set<Integer>>> key_2_map_ = new HashMap<>();
+        Map<String, Set<Integer>> emptyMap = new HashMap<>();
+        Map<String, Set<Integer>> typeMap = new HashMap<>();
+        Map<String, Set<Integer>> wordMap = new HashMap<>();
+        key_2_map_.put("", emptyMap);
+        key_2_map_.put("type", typeMap);
+        key_2_map_.put("word", wordMap);
+
+        for (FilterRule rule : this.filterRules) {
+            String key_2 = rule.getKey_2();
+            String type_2 = rule.getType_2();
+            Integer uuid = rule.getUuid();
+
+            this.inputMaps(key_2, type_2, uuid, emptyMap, typeMap, wordMap, null);
+        }
+        this.key_2_map = key_2_map_;
+    }
+
+    /**
+     * 制作内部实体相关信息字典
+     */
+    private void makeInsideMap() {
+        Map<String, Map<String, Set<Integer>>> insideMap_ = new HashMap<>();
+        Map<String, Set<Integer>> punctuationMap = new HashMap<>();
+        Map<String, Set<Integer>> typeMap = new HashMap<>();
+        Map<String, Set<Integer>> typePunctuationMap = new HashMap<>();
+        Map<String, Set<Integer>> wordMap = new HashMap<>();
+        insideMap_.put("punc", punctuationMap);
+        insideMap_.put("type", typeMap);
+        insideMap_.put("typePunctuation", typePunctuationMap);
+        insideMap_.put("word", wordMap);
+
+        for (FilterRule rule : this.filterRules) {
+            String inside = rule.getInside();
+            String insideType = rule.getInsideType();
+            Integer uuid = rule.getUuid();
+            if (insideType.equals("punc"))
+                this.punctuations.put(inside, inside);
+
+            if (",".equals(inside.substring(0, 1)))
+                this.inputMaps(inside, insideType, uuid, null, typePunctuationMap, wordMap, punctuationMap);
+            else
+                this.inputMaps(inside, insideType, uuid, null, typeMap, wordMap, punctuationMap);
+        }
+        this.insideMap = insideMap_;
+    }
+
+    /**
+     * maps输入
+     *
+     * @param key
+     * @param type
+     * @param uuid
+     * @param emptyMap
+     * @param typeMap
+     * @param wordMap
+     */
+    private void inputMaps(String key, String type, Integer uuid, Map<String, Set<Integer>> emptyMap,
+                           Map<String, Set<Integer>> typeMap, Map<String, Set<Integer>> wordMap,
+                           Map<String, Set<Integer>> punctuationMap) {
+
+        if ("".equals(type)) {
+            if (emptyMap.get(key) == null)
+                emptyMap.put(key, new HashSet<>());
+            emptyMap.get(key).add(uuid);
+        } else if ("type".equals(type)) {
+            if (typeMap.get(key) == null)
+                typeMap.put(key, new HashSet<>());
+            typeMap.get(key).add(uuid);
+        } else if ("word".equals(type)) {
+            if (wordMap.get(key) == null)
+                wordMap.put(key, new HashSet<>());
+            wordMap.get(key).add(uuid);
+        } else if ("punc".equals(type)) {
+            if (punctuationMap.get(key) == null)
+                punctuationMap.put(key, new HashSet<>());
+            punctuationMap.get(key).add(uuid);
+        } else {
+            throw new RuntimeException("出现了位置新type");
+        }
+
+    }
+
+
+    /**
+     * 制作例外字典
+     */
+    private void makeDespiteMap() {
+        Map<String, Set<Integer>> despiteMap = new HashMap<>();
+        for (FilterRule rule : this.filterRules) {
+            String despite = rule.getDespite();
+            if (!despite.equals("")) {  // 空白不收录
+                if (despiteMap.get(despite) == null) {
+                    despiteMap.put(despite, new HashSet<>());
+                }
+                despiteMap.get(despite).add(rule.getUuid());  //
+            }
+        }
+        this.despiteMap = despiteMap;
+    }
+
+
+    /**
+     * 制作例外_内部字典
+     */
+    private void makeDespiteInsideMap() {
+        Map<String, Set<Integer>> despiteInsideMap = new HashMap<>();
+        for (FilterRule rule : this.filterRules) {
+            String despiteInside = rule.getDespiteInside();
+            if (!despiteInside.equals("")) {  // 空白不收录
+                if (despiteInsideMap.get(despiteInside) == null) {
+                    despiteInsideMap.put(despiteInside, new HashSet<>());
+                }
+                despiteInsideMap.get(despiteInside).add(rule.getUuid());  //
+            }
+        }
+        this.despiteInsideMap = despiteInsideMap;
+    }
+
+    /**
+     * 名称—类别—开始位置类
+     */
+    class NameTypeStartPosition implements Comparable<NameTypeStartPosition> {
+        private String name;
+        private String type;
+        private int startPosition;
+
+        public NameTypeStartPosition(String name, String type, int startPosition) {
+            this.name = name;
+            this.type = type;
+            this.startPosition = startPosition;
+        }
+
+        @Override
+        public int compareTo(NameTypeStartPosition o) {
+            return this.startPosition - o.getStartPosition();
+        }
+
+        public String getName() {
+            return name;
+        }
+
+        public void setName(String name) {
+            this.name = name;
+        }
+
+        public String getType() {
+            return type;
+        }
+
+        public void setType(String type) {
+            this.type = type;
+        }
+
+        public int getStartPosition() {
+            return startPosition;
+        }
+
+        public void setStartPosition(int startPosition) {
+            this.startPosition = startPosition;
+        }
+
+        @Override
+        public String toString() {
+            return "NameTypeStartPosition{" +
+                    "name='" + name + '\'' +
+                    ", type='" + type + '\'' +
+                    ", startPosition=" + startPosition +
+                    '}';
+        }
+
+    }
+
+    /**
+     * 获取已排序的(名称,类别,开始位置)对象
+     *
+     * @param triads
+     * @return
+     */
+    public List<NameTypeStartPosition> getSortedNameTypeByPosition(List<Triad> triads) {
+        List<NameTypeStartPosition> nameTypeStartPositions = new ArrayList<>();
+        for (Triad triad : triads) {
+            Lemma l1 = triad.getL_1();
+            Lemma l2 = triad.getL_2();
+            nameTypeStartPositions.add(
+                    new NameTypeStartPosition(l1.getText(), l1.getProperty(), l1.getStartPosition()));
+            nameTypeStartPositions.add(
+                    new NameTypeStartPosition(l2.getText(), l2.getProperty(), l2.getStartPosition()));
+        }
+        nameTypeStartPositions.sort(Comparator.naturalOrder());
+
+        return nameTypeStartPositions;
+    }
+
+    /**
+     * 是否移除
+     *
+     * @param nameTypeStartPositions
+     * @param startIndex
+     * @param endIndex
+     * @return
+     */
+    public boolean isRemove(List<NameTypeStartPosition> nameTypeStartPositions, int startIndex, int endIndex,
+                            String sentence) {
+        Set<Integer> remainUuids = new HashSet<>();  // 剩余规则的uuid
+        for (FilterRule rule : this.filterRules)
+            remainUuids.add(rule.getUuid());
+
+        // 过滤实体名称触发例外条件情况
+        String entity_1_name = nameTypeStartPositions.get(startIndex).getName();
+        String entity_1_type = nameTypeStartPositions.get(startIndex).getType();
+
+        String entity_2_name = nameTypeStartPositions.get(endIndex).getType();
+        String entity_2_type = nameTypeStartPositions.get(endIndex).getType();
+
+        Set<Integer> set = null;
+        set = this.despiteMap.get(entity_1_name);  // 过滤有实体1名为例外情况(即,不成立)的规则(的uuid)
+        this.removeAll(remainUuids, set);
+
+        set = this.despiteMap.get(entity_2_name);  // 过滤有实体2名为例外情况(即,不成立)的规则(的uuid)
+        this.removeAll(remainUuids, set);
+
+        // 过滤中间实体的名称触发例外条件情况
+        for (int i = startIndex; i <= endIndex; i++) {
+            NameTypeStartPosition nameTypeStartPosition = nameTypeStartPositions.get(i);
+            set = this.despiteInsideMap.get(nameTypeStartPosition.getName());
+            this.removeAll(remainUuids, set);
+        }
+
+        // 三板斧过滤
+        // 实体1,过滤
+        set = new HashSet<>();
+        this.addAll(set, this.key_1_map.get("").get(""));
+        // 满足,形如("形容词", "type") 过滤条件的规则
+        this.addAll(set, this.key_1_map.get("type").get(entity_1_type));
+        // 满足,形如("胸痛", "word") 过滤条件的规则
+        this.addAll(set, this.key_1_map.get("word").get(entity_1_name));
+        this.retainAll(remainUuids, set);  // 求交集,同事满足实体1相关的过滤条件,且不不满足例外情况
+        if (remainUuids.size() == 0)
+            return false;
+
+        // 实体2,过滤
+        set = new HashSet<>();
+        this.addAll(set, this.key_2_map.get("").get(""));
+        // 满足,形如("形容词", "type") 过滤条件的规则
+        this.addAll(set, this.key_2_map.get("type").get(entity_2_type));
+        // 满足,形如("胸痛", "word") 过滤条件的规则
+        this.addAll(set, this.key_2_map.get("word").get(entity_2_name));
+        this.retainAll(remainUuids, set);  // 求交集,同事满足实体1相关的过滤条件,且不不满足例外情况
+        if (remainUuids.size() == 0)
+            return false;
+
+        // 中间实体过滤
+        set = new HashSet<>();
+        for (int i = startIndex; i <= endIndex; i++) {
+            NameTypeStartPosition nameTypeStartPosition = nameTypeStartPositions.get(i);
+            // 中间实体满足,形如("胸痛", "word") 过滤条件的规则
+            this.addAll(set, this.insideMap.get("word").get(nameTypeStartPosition.getName()));
+            // 中间实体满足,形如(";", "punc") 过滤条件的规则
+            this.addAll(set, this.insideMap.get("type").get(nameTypeStartPosition.getType()));  // 没有逗号的
+        }
+
+        int entity_1_start = nameTypeStartPositions.get(startIndex).getStartPosition();
+        int entity_2_start = nameTypeStartPositions.get(endIndex).getStartPosition();
+
+        // 标点过滤
+        String aPunc = null;
+        for (int i=entity_1_start; i<entity_2_start;i++){
+            aPunc = sentence.substring(i, i+1);
+            if (this.punctuations.get(aPunc) != null)
+                this.addAll(set, this.insideMap.get("punc").get(aPunc));
+        }
+
+        // 中文和英文逗号+属性 过滤
+        String[] commas = {",", ","};
+        int commaIndex = 0;
+        String commaPadType = null;  // 逗号拼接上类型
+        for (String comma: commas) {
+            commaIndex = sentence.indexOf(comma, entity_1_start + 1);  // 逗号位置
+            while (commaIndex > -1 && commaIndex < entity_2_start) {
+                commaIndex = sentence.indexOf(comma, commaIndex + 1);  // 下一个逗号
+                for (int i = startIndex; i <= endIndex; i++) {  // 每个逗号与后面的所有实体都匹配一次
+                    NameTypeStartPosition nameTypeStartPosition = nameTypeStartPositions.get(i);
+                    if (nameTypeStartPosition.getStartPosition() > commaIndex) {
+                        commaPadType = "," + nameTypeStartPosition.getType();
+                        this.addAll(set, this.insideMap.get("typePunctuation").get(commaPadType));
+                    }
+
+                }
+            }
+
+        }
+
+        this.retainAll(remainUuids, set);  // 求交集,同事中间实体相关的过滤条件,且不不满足例外情况
+
+//        for (FilterRule rule: this.filterRules) {
+//            if (remainUuids.contains(rule.getUuid()))
+//                System.out.println(rule);
+//
+//        }
+
+        return remainUuids.size() > 0;  // 还有规则满足,则过滤
+
+    }
+
+    /**
+     * 求差集,避免null和空集
+     *
+     * @param basicSet
+     * @param set
+     */
+    private void removeAll(Set<Integer> basicSet, Set<Integer> set) {
+        if (set != null && set.size() > 0)
+            basicSet.removeAll(set);
+    }
+
+    /**
+     * 求交集,避免null和空集
+     *
+     * @param basicSet
+     * @param set
+     */
+    private void addAll(Set<Integer> basicSet, Set<Integer> set) {
+        if (set != null && set.size() > 0)
+            basicSet.addAll(set);
+    }
+
+    /**
+     * 求并集,避免null和空集
+     *
+     * @param basicSet
+     * @param set
+     */
+    private void retainAll(Set<Integer> basicSet, Set<Integer> set) {
+        if (set != null && set.size() > 0)
+            basicSet.retainAll(set);
+    }
+
+    /**
+     * 检查并移除
+     *
+     * @param sentence 句子
+     * @param triads 三元组列表
+     */
+    public void checkAndRemove(String sentence, List<Triad> triads) {
+        List<NameTypeStartPosition> nameTypeStartPositions = this.getSortedNameTypeByPosition(triads);
+        Map<Integer, Integer> startPositionToIndexMap = new HashMap<>();
+        for (int i = 0; i < nameTypeStartPositions.size(); i++)
+            startPositionToIndexMap.put(nameTypeStartPositions.get(i).getStartPosition(), i);
+
+        Iterator<Triad> it = triads.iterator();
+        while (it.hasNext()) {  // 遍历三元组,移除满足过滤规则的
+            Triad triad = it.next();
+            int startIndex = startPositionToIndexMap.get(triad.getL_1().getStartPosition());
+            int endIndex = startPositionToIndexMap.get(triad.getL_2().getStartPosition());
+            if (isRemove(nameTypeStartPositions, startIndex, endIndex, sentence)) {
+                it.remove();
+            }
+        }
+    }
+}

+ 3 - 4
algorithm/src/main/java/org/algorithm/core/cnn/AlgorithmCNNExecutor.java

@@ -1,13 +1,12 @@
 package org.algorithm.core.cnn;
 
-import org.algorithm.core.cnn.entity.Lemma;
 import org.algorithm.core.cnn.entity.Triad;
 
 import java.util.List;
 
 /**
  * @ClassName org.algorithm.core.cnn.model.AlgorithmCNNExecutor
- * @Description TODO
+ * @Description
  * @Author fyeman
  * @Date 2019/1/17/017 19:18
  * @Version 1.0
@@ -16,8 +15,8 @@ public abstract class AlgorithmCNNExecutor {
     /**
      *
      * @param content 输入句子
-     * @param triads 实体列表
-     * @return
+     * @param triads 实体列表(三元组列表)
+     * @return  [[有关系的一系列词]]
      */
     public abstract List<Triad> execute(String content, List<Triad> triads);
 }

+ 22 - 0
algorithm/src/main/java/org/algorithm/core/cnn/AlgorithmCNNExecutorPacs.java

@@ -0,0 +1,22 @@
package org.algorithm.core.cnn;

import org.algorithm.core.cnn.entity.Triad;

import java.util.List;

/**
 * @ClassName org.algorithm.core.cnn.AlgorithmCNNExecutorPacs
 * @Description base class for PACS relation-extraction executors: takes a
 *              sentence plus its candidate triads and returns groups of
 *              related words (fixed stale @ClassName copied from
 *              AlgorithmCNNExecutor)
 * @Author fyeman
 * @Date 2019/1/17/017 19:18
 * @Version 1.0
 **/
public abstract class AlgorithmCNNExecutorPacs {
    /**
     * @param content input sentence
     * @param triads  candidate entity pairs (triad list)
     * @return list of related word groups, one inner list per relation chain
     */
    public abstract List<List<String>> execute(String content, List<Triad> triads);
}

+ 1 - 1
algorithm/src/main/java/org/algorithm/core/cnn/dataset/RelationExtractionDataSet.java

@@ -17,7 +17,7 @@ import com.alibaba.fastjson.JSONObject;
 public class RelationExtractionDataSet {
 
     private Map<String, Integer> char2id = new HashMap<>();
-    public final int MAX_LEN = 512;
+    public final int MAX_LEN = 256;
 
 
     public RelationExtractionDataSet(String dir) {

+ 32 - 1
algorithm/src/main/java/org/algorithm/core/cnn/entity/Lemma.java

@@ -10,12 +10,38 @@ import java.util.List;
  * @Date 2019/1/17/017 19:15
  * @Version 1.0
  **/
-public class Lemma {
+public class Lemma implements Comparable<Lemma> {
     private String text;
     private String position;
     private int len;
     private String property;
 
+    private Lemma parent;
+
+    private boolean haveChildren = false;
+
+    public boolean isHaveChildren() {
+        return haveChildren;
+    }
+
+    public void setHaveChildren(boolean haveChildren) {
+        this.haveChildren = haveChildren;
+    }
+
+    public Lemma getParent() {
+        return parent;
+    }
+
+    public void setParent(Lemma parent) {
+        this.parent = parent;
+    }
+
+    public int getStartPosition() {
+        String[] pos = this.position.split(",");
+        return Integer.parseInt(pos[0]);
+    }
+
+
     private List<Lemma> relationLemmas = new ArrayList<>();
 
     public String getText() {
@@ -64,4 +90,9 @@ public class Lemma {
         }
         relationLemmas.add(l);
     }
+
+    @Override
+    public int compareTo(Lemma o) {
+        return this.getStartPosition() - o.getStartPosition();
+    }
 }

+ 40 - 13
algorithm/src/main/java/org/algorithm/core/cnn/model/RelationExtractionEnsembleModel.java

@@ -1,6 +1,8 @@
 package org.algorithm.core.cnn.model;
 
-import org.algorithm.core.cnn.AlgorithmCNNExecutor;
+import org.algorithm.core.RelationTreeUtils;
+import org.algorithm.core.RuleCheckMachine;
+import org.algorithm.core.cnn.AlgorithmCNNExecutorPacs;
 import org.algorithm.core.cnn.dataset.RelationExtractionDataSet;
 import org.algorithm.core.cnn.entity.Triad;
 import org.diagbot.pub.utils.PropertiesUtil;
@@ -21,7 +23,7 @@ import java.util.concurrent.*;
  * @Date: 2019/1/22 10:21
  * @Description: 集成模型
  */
-public class RelationExtractionEnsembleModel extends AlgorithmCNNExecutor {
+public class RelationExtractionEnsembleModel extends AlgorithmCNNExecutorPacs {
     private final String X_PLACEHOLDER = "X";
     private final String PREDICTION = "prediction/prediction";
     private final int NUM_LABEL = 1;
@@ -30,8 +32,10 @@ public class RelationExtractionEnsembleModel extends AlgorithmCNNExecutor {
     private RelationExtractionDataSet dataSet;
     private RelationExtractionSubModel[] subModels = new RelationExtractionSubModel[2];
     private ExecutorService executorService = Executors.newCachedThreadPool();
+    private final RuleCheckMachine ruleCheckMachine = new RuleCheckMachine();
 
     public RelationExtractionEnsembleModel() {
+        // 解析路径
         PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
 
         String modelsPath = prop.getProperty("basicPath");  // 模型基本路径
@@ -39,18 +43,20 @@ public class RelationExtractionEnsembleModel extends AlgorithmCNNExecutor {
         dataSetPath = dataSetPath + File.separator + "char2id.json";
         String exportDir = modelsPath.replace("model_version_replacement", "ensemble_model_2");
 
+        // 加载数据集和初始化集成模型
         this.dataSet = new RelationExtractionDataSet(dataSetPath);
         this.init(exportDir);
 
+        // 添加子模型系数,并加载子模型cnn_1d_low
         Map<String, Tensor<Float>> cnn_1d_low_map = new HashMap<>();
-        cnn_1d_low_map.put("keep_prob",Tensor.create(1.0f, Float.class));
+        cnn_1d_low_map.put("keep_prob", Tensor.create(1.0f, Float.class));
         subModels[0] = new RelationExtractionSubModel("cnn_1d_low", cnn_1d_low_map);
-//        subModels[1] = new RelationExtractionSubModel("cnn_1d_lstm_low");
 
+        // 添加子模型系数,并加载子模型lstm_low_api
         Map<String, Tensor<Float>> lstm_low_api_map = new HashMap<>();
-        lstm_low_api_map.put("input_keep_prob",Tensor.create(1.0f, Float.class));
-        lstm_low_api_map.put("output_keep_prob",Tensor.create(1.0f, Float.class));
-        lstm_low_api_map.put("state_keep_prob",Tensor.create(1.0f, Float.class));
+        lstm_low_api_map.put("input_keep_prob", Tensor.create(1.0f, Float.class));
+        lstm_low_api_map.put("output_keep_prob", Tensor.create(1.0f, Float.class));
+        lstm_low_api_map.put("state_keep_prob", Tensor.create(1.0f, Float.class));
         subModels[1] = new RelationExtractionSubModel("lstm_low_api", lstm_low_api_map);
     }
 
@@ -92,12 +98,24 @@ public class RelationExtractionEnsembleModel extends AlgorithmCNNExecutor {
         return inputValues;
     }
 
+
+    /**
+     * 数据预处理,包括过滤,等操作
+     * @param content
+     * @param triads
+     */
+    private void preProcess(String content, List<Triad> triads){
+        if (!(content.length() > this.dataSet.MAX_LEN) && triads.size() > 0) // 句子长度不超过MAX_LEN,有三元组
+            this.ruleCheckMachine.checkAndRemove(content, triads);
+    }
+
     @Override
-    public List<Triad> execute(String content, List<Triad> triads) {
-        // 句子长度不超过MAX_LEN,有三元组
-        if (content.length() > this.dataSet.MAX_LEN || triads.size() < 1) {
-            return new ArrayList<>();
-        }
+    public List<List<String>> execute(String content, List<Triad> triads) {
+        // 预处理
+        this.preProcess(content, triads);
+        if (content.length() > this.dataSet.MAX_LEN || triads.size() < 1)  // 句子长度不超过MAX_LEN,有三元组
+            return null;
+
         int[][] inputValues = this.convertData(content, triads);  // shape = [3, batchSize * this.subModels.length]
         int batchSize = triads.size();
 
@@ -159,7 +177,16 @@ public class RelationExtractionEnsembleModel extends AlgorithmCNNExecutor {
         for (Triad triad : deleteTriads)
             triads.remove(triad);
 
-        return triads;
+        return this.triadsToRelationTreeBranches(triads);
+    }
+
+    /**
+     * 从三元组列表到关系树分枝
+     * @param triads
+     * @return
+     */
+    public List<List<String>> triadsToRelationTreeBranches(List<Triad> triads) {
+        return RelationTreeUtils.triadsToRelationTreeBranches(triads);
     }
 
 

+ 4 - 4
algorithm/src/main/java/org/algorithm/core/cnn/model/RelationExtractionModel.java

@@ -4,7 +4,7 @@ import com.alibaba.fastjson.JSON;
 import com.alibaba.fastjson.JSONArray;
 import com.alibaba.fastjson.JSONObject;
 import com.alibaba.fastjson.TypeReference;
-import org.algorithm.core.cnn.AlgorithmCNNExecutor;
+import org.algorithm.core.cnn.AlgorithmCNNExecutorPacs;
 import org.algorithm.core.cnn.dataset.RelationExtractionDataSet;
 import org.algorithm.core.cnn.entity.LemmaInfo;
 import org.algorithm.core.cnn.entity.Triad;
@@ -21,7 +21,7 @@ import java.util.List;
  * @Date: 2019/1/22 10:21
  * @Decription:
  */
-public class RelationExtractionModel extends AlgorithmCNNExecutor {
+public class RelationExtractionModel extends AlgorithmCNNExecutorPacs {
 //    self.X = tf.placeholder(tf.int32, shape=[None, self.max_length], name='X')
 //    self.pos1 = tf.placeholder(tf.int32, shape=[None, self.max_length], name='pos1')
 //    self.pos2 = tf.placeholder(tf.int32, shape=[None, self.max_length], name='pos2')
@@ -54,7 +54,7 @@ public class RelationExtractionModel extends AlgorithmCNNExecutor {
     }
 
     @Override
-    public List<Triad> execute(String content, List<Triad> triads) {
+    public List<List<String>> execute(String content, List<Triad> triads) {
 //        List<Lemma[]> combinations = new ArrayList<>();
 //        // 组合
 //        for(int i=0; i < lemmas.size() - 1; i++){  // 两两组合成实体对
@@ -83,7 +83,7 @@ public class RelationExtractionModel extends AlgorithmCNNExecutor {
 //            }
 //
 //        }
-        return triads;
+        return null;
     }
 
     /**

+ 2 - 1
algorithm/src/main/java/org/algorithm/core/neural/DiagnosisPredictExecutor.java

@@ -15,7 +15,8 @@ public class DiagnosisPredictExecutor extends AlgorithmNeuralExecutor {
     public DiagnosisPredictExecutor() {
         String modelVersion = "diagnosisPredict.version";
 
-        this.model = TensorFlowModelLoadFactory.create(modelVersion);
+//        this.model = TensorFlowModelLoadFactory.create(modelVersion);
+        this.model = TensorFlowModelLoadFactory.createAndFilterDiagnosis(modelVersion);  // 加了疾病过滤
     }
 
 }

+ 38 - 17
algorithm/src/main/java/org/algorithm/core/neural/TensorFlowModelLoadFactory.java

@@ -6,38 +6,59 @@ import org.diagbot.pub.utils.PropertiesUtil;
 
 /**
  * Tensorlflow 模型加载工厂
+ *
  * @Author: bijl
  * @Date: 2018年7月19日-下午7:28:58
  * @Description:
  */
 public class TensorFlowModelLoadFactory {
-    
+
     /**
      * 加载并创建模型类
-     * @param exportDir  模型保存地址
-     * @param inputOpName  输入op的名称
-     * @param outputOpName  输出op的名称
-     * @param dataSet     模型使用的数据集
+     *
+     * @param modelVersion 模型版本号
      * @return 模型
      */
     public static TensorflowModel create(String modelVersion) {
-        
-        
+
+
         PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
-        
-        String inputOpName = "X";  // 统一输入op名称
-        String outputOpName = "softmax/softmax";  // 统一输出op名称
-        
-        // TODO:修改的地方
+
+
 //        NNDataSet dataSet = new NNDataSetImplNonParallel(modelVersion);  // 新模型
         NNDataSet dataSet = new NNDataSetImpl(modelVersion);  // 老模型
-        
-        String modelPath =prop.getProperty("basicPath");  // 模型基本路径
+
+        String modelPath = prop.getProperty("basicPath");  // 模型基本路径
+        modelVersion = prop.getProperty(modelVersion);
+        modelPath = modelPath.replace("model_version_replacement", modelVersion);  // 生成模型路径
+
+        TensorflowModel tm = new TensorflowModel(modelPath, dataSet);
+        return tm;
+    }
+
+    /**
+     * 加载并创建模型类
+     *
+     * @param modelVersion 模型版本号
+     * @return 模型
+     */
+    public static TensorflowModel createAndFilterDiagnosis(String modelVersion) {
+
+
+        PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
+
+        NNDataSet dataSet = new NNDataSetImpl(modelVersion);  // 老模型
+
+        dataSet.setDoFilterDiagnosis(true);
+        dataSet.readFilterDiagnosisDict();
+        dataSet.setWithSequenceInputs(true);  // 使用序列输入
+        dataSet.readChar2IdDict(modelVersion);  // 读取字符字典
+
+        String modelPath = prop.getProperty("basicPath");  // 模型基本路径
         modelVersion = prop.getProperty(modelVersion);
         modelPath = modelPath.replace("model_version_replacement", modelVersion);  // 生成模型路径
-        
-        TensorflowModel tm = new TensorflowModel(modelPath, inputOpName, outputOpName,
-                dataSet);
+
+        TensorflowModel tm = new TensorflowModel(modelPath, dataSet);
         return tm;
     }
 

+ 104 - 36
algorithm/src/main/java/org/algorithm/core/neural/TensorflowModel.java

@@ -6,43 +6,55 @@ import org.tensorflow.Session;
 import org.tensorflow.Tensor;
 
 import java.nio.FloatBuffer;
+import java.nio.IntBuffer;
+import java.util.HashMap;
 import java.util.Map;
 
 /**
  * tensorflow 模型类,要求单个样本是1维向量,而不是高维向量
+ *
  * @Author: bijl
  * @Date: 2018年7月19日-下午7:21:24
  * @Description:
  */
 public class TensorflowModel {
-    
-    private final String INPUT_OPERATION_NAME;   // 输入op的名称
-    private final String OUTPUT_OPERATION_NAME;  // 输出op的名称
+
+
+    private final String X = "X";  // 输入op x的名字
+    private final String Char_ids = "Char_ids";  // 输入op Char_ids的名字
+    private final String Pos_ids = "Pos_ids";  // 输入op Pos_ids的名字
+    private final String SOFT_MAX = "softmax/softmax";  // 输出op的名称
+
     private final int NUM_FEATURE;  // 特征个数
     private final int NUM_LABEL;  //  标签(类别)个数
     private SavedModelBundle bundle; // 模型捆绑
     private Session session;  // 会话
     private NNDataSet dataSet;  // 数据集
-    
+
+
+    private boolean withSequenceInputs = false;  // 是否带有序列输入
+    private final int MAX_LEN; // 最大长度
+
+
     /**
-     * 
-     * @param exportDir  模型保存地址
-     * @param inputOpName  输入op的名称
-     * @param outputOpName  输出op的名称
-     * @param dataSet  模型使用的数据集
+     * @param exportDir 模型保存地址
+     * @param dataSet   模型使用的数据集
      */
-    public TensorflowModel(String exportDir, String inputOpName, String outputOpName, NNDataSet dataSet) {
-        this.INPUT_OPERATION_NAME = inputOpName;
-        this.OUTPUT_OPERATION_NAME = outputOpName;
+    public TensorflowModel(String exportDir, NNDataSet dataSet) {
+
+        this.init(exportDir);
         this.dataSet = dataSet;
         this.NUM_FEATURE = this.dataSet.getNumFeature();
         this.NUM_LABEL = this.dataSet.getNumLabel();
-        this.init(exportDir);
-                
+
+        // 序列数据有段的属性
+        this.MAX_LEN = this.dataSet.getMAX_LEN();
+        this.withSequenceInputs = this.dataSet.isWithSequenceInputs();
     }
-    
+
     /**
      * 初始化:加载模型,获取会话。
+     *
      * @param exportDir
      */
     public void init(String exportDir) {
@@ -54,29 +66,77 @@ public class TensorflowModel {
         }
 
         // create the session from the Bundle
-        this.session = bundle.session(); 
+        this.session = bundle.session();
+    }
+
+
+    /**
+     * 包装序列化输入
+     *
+     * @param sequenceValuesMap 序列输入的map
+     * @param numExamples       样本数
+     * @return
+     */
+    private Map<String, Tensor<Integer>> wrapSequenceInputs(Map<String, int[]> sequenceValuesMap, int numExamples) {
+        long[] inputShape = {numExamples, this.MAX_LEN};
+        Map<String, Tensor<Integer>> sequenceTensorMap = new HashMap<>();
+        for (Map.Entry<String, int[]> entry : sequenceValuesMap.entrySet()) {
+            String mapKey = entry.getKey();
+            Tensor<Integer> inputTensor = Tensor.create(
+                    inputShape,
+                    IntBuffer.wrap(entry.getValue())
+            );
+            sequenceTensorMap.put(mapKey, inputTensor);
+        }
+
+        return sequenceTensorMap;
     }
-    
+
+
     /**
      * 运行模型
-     * @param inputValues  输入值
-     * @param numExamples  样本个数
+     *
+     * @param inputValues 输入值
+     * @param numExamples 样本个数
      * @return 模型的输出
      */
-    private float[][] run(float[] inputValues, int numExamples){
-//        long[] inputShape = {numExamples, this.NUM_FEATURE, 4, 1};  // 新模型
-        long[] inputShape = {numExamples, this.NUM_FEATURE};  // 老模型
+    private float[][] run(float[] inputValues, Map<String, int[]> sequenceValues, int numExamples) {
+        long[] inputShape = {numExamples, this.NUM_FEATURE};
         Tensor<Float> inputTensor = Tensor.create(
-                inputShape,  
-                FloatBuffer.wrap(inputValues) 
+                inputShape,
+                FloatBuffer.wrap(inputValues)
         );
-        return this.session.runner().feed(this.INPUT_OPERATION_NAME, inputTensor)
-                .feed("keep_prob", Tensor.create(1.0f, Float.class))  // dropout保留率
-                .fetch(this.OUTPUT_OPERATION_NAME).run().get(0)
-                .copyTo(new float[numExamples][this.NUM_LABEL]);
+
+        float[][] result = null;
+        Tensor<?> t = null;
+        // 序列数据
+        if (this.withSequenceInputs){
+            Map<String, Tensor<Integer>> sequenceTensorMap = this.wrapSequenceInputs(sequenceValues, numExamples);
+
+            t = this.session.runner().feed(this.X, inputTensor)
+                    .feed(this.Char_ids, sequenceTensorMap.get(this.Char_ids))
+                    .feed(this.Pos_ids, sequenceTensorMap.get(this.Pos_ids))
+                    .feed("keep_prob", Tensor.create(1.0f, Float.class))  // dropout保留率
+                    .fetch(this.SOFT_MAX).run().get(0);
+
+            for (Map.Entry<String, Tensor<Integer>> entry : sequenceTensorMap.entrySet()) {
+                entry.getValue().close();
+            }
+
+        }else{
+            t =  this.session.runner().feed(this.X, inputTensor)
+                    .feed("keep_prob", Tensor.create(1.0f, Float.class))  // dropout保留率
+                    .fetch(this.SOFT_MAX).run().get(0);
+        }
+        result = t.copyTo(new float[numExamples][this.NUM_LABEL]);
+
+        t.close();
+        inputTensor.close();
+
+        return result;
     }
-    
-    
+
+
     /**
      * 运行模型,并将结果打包成目标格式
      */
@@ -85,14 +145,22 @@ public class TensorflowModel {
         float sum = 0;
         for (float f : inputValues)
             sum += f;
-        if(sum == 0)  // 如果输入没有有效特征,则直接返回null
+        if (sum == 0)  // 如果输入没有有效特征,则直接返回null
             return null;
-        
-        float[][] predict = this.run(inputValues, 1);  // 一次一个样本
-        return this.dataSet.wrap(predict);  
+
+        Map<String, int[]> sequenceValues = null;
+        if (this.withSequenceInputs){
+            sequenceValues = new HashMap<>();
+            sequenceValues.put(this.Char_ids, this.dataSet.toCharIds(inputs));
+            sequenceValues.put(this.Pos_ids, this.dataSet.toPosIds(inputs));
+        }
+
+
+        float[][] predict = this.run(inputValues, sequenceValues, 1);  // 一次一个样本
+        return this.dataSet.wrap(predict);
     }
-    
-    
+
+
     /**
      * 关闭会话,释放资源
      */

+ 308 - 13
algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSet.java

@@ -1,10 +1,10 @@
 package org.algorithm.core.neural.dataset;
 
-import java.util.HashMap;
-import java.util.Map;
+import java.util.*;
 
 /**
  * 神经网络用数据处理模块
+ *
  * @Author: bijl
  * @Date: 2018年7月20日-下午4:01:34
  * @Description:
@@ -13,17 +13,35 @@ public abstract class NNDataSet {
     protected final int NUM_FEATURE;
     private final int NUM_LABEL;
     protected final Map<String, Integer> FEATURE_DICT = new HashMap<>();
-    
+
     // 新版本新加的三种关键词
     protected final Map<String, Integer> PARTBODY_DICT = new HashMap<>();
     protected final Map<String, Integer> PROPERTY_DICT = new HashMap<>();
     protected final Map<String, Integer> DURATION_DICT = new HashMap<>();
-    
+
     protected final Map<String, Integer> LABEL_DICT = new HashMap<>();
     protected final Map<String, Integer> NEGATIVE_DICT = new HashMap<>();
+
     private final String[] FEATURE_DICT_ARRAY;
     private final String[] LABEL_DICT_ARRAY;
 
+    // 再分词和疾病过滤相关容器
+    protected final Map<String, String> RE_SPLIT_WORD_DICT = new HashMap<>();  // 在分词表
+    protected List<String> FEATURE_NAME_STORE = new ArrayList<>();  // 特征保存
+    protected final Map<String, Map<String, Integer>> RELATED_DIAGNOSIS_DICT = new HashMap<>();  // 特征与疾病相关表
+    private boolean doFilterDiagnosis = false;  // 是否做疾病过滤
+
+    private final float firstRateThreshold = 0.1f;  // 第一个疾病的概率阈值
+    private final float lastRateThreshold = 0.005f;  // 最后一个概率阈值
+    private final float rateSumThreshold = 0.6f;  // 概率和阈值
+    private final int numToPush = 3;  // 推荐推送的个数
+    private final float rapidFallTimes = 5;  // 骤降倍数
+
+    // 序列数据
+    private final int MAX_LEN = 257;
+    private boolean withSequenceInputs = false;  // 是否带有序列输入
+    protected final Map<String, Integer> CHAR2ID_DICT = new HashMap<>();
+
 
     public NNDataSet(String modelAndVersion) {
         this.readDict(modelAndVersion);
@@ -32,41 +50,290 @@ public abstract class NNDataSet {
         this.FEATURE_DICT_ARRAY = new String[this.NUM_FEATURE];
         this.LABEL_DICT_ARRAY = new String[this.NUM_LABEL];
         this.makeDictArr();
+        this.readReSplitWordDict();
+
+
     }
-    
+
     /**
      * 装外部输入转为特征向量
+     *
      * @param inputs
      * @return
      */
     public abstract float[] toFeatureVector(Map<String, Map<String, String>> inputs);
 
+    /**
+     * 装外部输入转为字符ids
+     *
+     * @param inputs
+     * @return
+     */
+    public abstract int[] toCharIds(Map<String, Map<String, String>> inputs);
+
+    /**
+     * 装外部输入转为位置ids
+     *
+     * @param inputs
+     * @return
+     */
+    public abstract int[] toPosIds(Map<String, Map<String, String>> inputs);
+
     /**
      * 读取特征和类别字典
      */
     public abstract void readDict(String modelAndVersion);
-    
+
+
+    /**
+     * 读取特征和类别字典
+     */
+    public abstract void readChar2IdDict(String modelAndVersion);
+
+    /**
+     * 读取再分词字典
+     */
+    public abstract void readReSplitWordDict();
+
+    /**
+     * 读取过滤字典
+     */
+    public abstract void readFilterDiagnosisDict();
+
     /**
      * 生成字典列表
      */
     private void makeDictArr() {
-        for (Map.Entry<String, Integer> entry : this.FEATURE_DICT.entrySet()) 
+        for (Map.Entry<String, Integer> entry : this.FEATURE_DICT.entrySet())
             this.FEATURE_DICT_ARRAY[entry.getValue()] = entry.getKey();
-        
-        for (Map.Entry<String, Integer> entry : this.LABEL_DICT.entrySet()) 
+
+        for (Map.Entry<String, Integer> entry : this.LABEL_DICT.entrySet())
             this.LABEL_DICT_ARRAY[entry.getValue()] = entry.getKey();
-        
+
+    }
+
+    /**
+     * 推送个数过滤[无效病历]
+     * 规则:最大概率疾病的概率要超过给定阈值,如果不超过,则认为疾病不收敛,不予推送
+     *
+     * @param nameAndValueListSorted
+     */
+    private void pushCountFilterBefore(List<NameAndValue> nameAndValueListSorted) {
+        if (nameAndValueListSorted.get(0).getValue() < this.firstRateThreshold)
+            nameAndValueListSorted.clear();
+    }
+
+    /**
+     * 推送个数过滤[概率和和概率骤降过滤]
+     * 规则:
+     * 1- 为了防止一棍子打死,我们还是尽量要推送3个病历的,除非概率骤降。
+     * 2- 概率骤降过滤,当病历收敛到一个或几个疾病之后,再出现的疾病,概率会骤然下降很多倍
+     * ,这时,这个疾病差不多是随机推送的,因此要过滤掉。【都要做】
+     * 2- 概率和,就是概率和不超过某个阈值【只有在剩余疾病个数超过阈值时做】
+     *
+     * @param nameAndValueListSorted
+     */
+    private void pushCountFilterAfter(List<NameAndValue> nameAndValueListSorted) {
+
+        // 如果不超过尽量推送的个数,只做概率骤降判断
+        Iterator<NameAndValue> it = nameAndValueListSorted.iterator();
+        boolean deleteTheRest = false;   // 是否删除剩余的疾病
+        float preRate = 0.0f; // 前一个疾病的概率
+        int restCnt = 0;  // 剩余疾病数
+        float rateSum = 0.0f;  // 概率和
+
+        while (it.hasNext()) {
+//            NameAndValue nameAndValue = it.next();
+//            if (!deleteTheRest) {
+//                // 相对于前一个疾病概率骤降rapidFallTimes倍
+//                if (preRate / nameAndValue.getValue() >= this.rapidFallTimes)
+//                    deleteTheRest = true;
+//                else {
+//                    rateSum += nameAndValue.getValue();
+//                    preRate = nameAndValue.getValue();
+//                    restCnt += 1;
+//                }
+//            }
+//
+//            if (deleteTheRest)  // 删除剩下的疾病
+//                it.remove();
+//
+//
+//            if (!deleteTheRest && restCnt >= this.numToPush) {
+//
+//                // 如果超过尽量推送的个数,那么做概率和阈值过滤【从下一个开始删除】
+//                if (rateSum >= this.rateSumThreshold)
+//                    deleteTheRest = true;
+//            }
+
+            NameAndValue nameAndValue = it.next();
+            if (!deleteTheRest) {
+                // 最后一个必须大于某个阈值
+                if (nameAndValue.getValue() < this.lastRateThreshold)
+                    deleteTheRest = true;
+            }
+
+            if (deleteTheRest)  // 删除剩下的疾病
+                it.remove();
+
+        }
+
+    }
+
+    /**
+     * 打包特征名和概率 + 过滤疾病 + 推送个数选择
+     * 基本操作,过滤前20个疾病,如果有疾病留下,否则前50个疾病
+     *
+     * @param predict 模型输出
+     * @return
+     */
+    public Map<String, Float> wrapAndFilterWithPushCountFilter(float[][] predict) {
+        List<NameAndValue> nameAndValueList = new ArrayList<>();
+        for (int i = 0; i < predict[0].length; i++)
+            nameAndValueList.add(new NameAndValue(this.LABEL_DICT_ARRAY[i], predict[0][i]));
+        nameAndValueList.sort(Comparator.reverseOrder());  // 按概率从大到小排列
+
+//        System.out.println("原来__推送:...............................................................");
+//        System.out.println(nameAndValueList.subList(0, 10));
+
+        pushCountFilterBefore(nameAndValueList);  // 推送个数过滤【无效病历过滤】
+
+//        nameAndValueList = filterDiagnosis(nameAndValueList);  // 疾病过滤
+
+        this.pushCountFilterAfter(nameAndValueList);  // 推送个数过滤【概率骤降和概率和阈值过滤】
+
+//        System.out.println("新版本__最终__推送:.......................................................");
+//        System.out.println("长度:" + nameAndValueList.size());
+//        System.out.println(nameAndValueList);
+
+        Map<String, Float> result = new HashMap<>();
+        for (NameAndValue nameAndValue : nameAndValueList)
+            result.put(nameAndValue.getName(), nameAndValue.getValue());
+
+        return result;
+    }
+
+    /**
+     * 疾病过滤
+     * 基本规则:
+     * 如果没有一个特征与该疾病共现过,那么删除该疾病
+     *
+     * @param nameAndValueListSorted
+     * @return
+     */
+    public List<NameAndValue> filterDiagnosis(List<NameAndValue> nameAndValueListSorted) {
+        Integer cnt = 0;
+        String diagnosis;
+        NameAndValue nameAndValue;
+        Map<String, Integer> relatedDiagnoses = null;
+        List<NameAndValue> candidateNameAndValues = new ArrayList<>();
+        for (int i = 0; i < nameAndValueListSorted.size(); i++) {
+            nameAndValue = nameAndValueListSorted.get(i);
+            diagnosis = nameAndValue.getName();
+
+            for (String featureName : this.FEATURE_NAME_STORE) {
+                relatedDiagnoses = this.RELATED_DIAGNOSIS_DICT.get(featureName);
+                if (relatedDiagnoses != null && relatedDiagnoses.get(diagnosis) != null) {
+                    candidateNameAndValues.add(nameAndValue);
+                    cnt += 1;
+                    break;  // 有一个共现即可
+                }
+            }
+            if ((i >= 20 || i >= 50) && cnt > 0)  // 如果前20或50个推送中有相关的疾病,只过滤他们
+                break;
+        }
+        return candidateNameAndValues;
+    }
+
+    /**
+     * 打包特征名和概率 + 过滤疾病
+     * 基本操作,过滤前20个疾病,如果
+     *
+     * @param predict 模型输出
+     * @return
+     */
+    public Map<String, Float> wrapAndFilter(float[][] predict) {
+        List<NameAndValue> nameAndValueList = new ArrayList<>();
+        for (int i = 0; i < predict[0].length; i++)
+            nameAndValueList.add(new NameAndValue(this.LABEL_DICT_ARRAY[i], predict[0][i]));
+        nameAndValueList.sort(Comparator.reverseOrder());  // 按概率从大到小排列
+
+        nameAndValueList = filterDiagnosis(nameAndValueList);  // 疾病过滤
+
+//        System.out.println("原版本__最终__推送 ......................................................");
+//        System.out.println("长度:" + nameAndValueList.size());
+//        System.out.println(nameAndValueList);
+
+        Map<String, Float> result = new HashMap<>();
+        for (NameAndValue nameAndValue : nameAndValueList)
+            result.put(nameAndValue.getName(), nameAndValue.getValue());
+        return result;
+    }
+
+    /**
+     * 用于排序的类
+     */
+    class NameAndValue implements Comparable<NameAndValue> {
+
+        private String name;
+        private Float value;
+
+        NameAndValue(String name, Float value) {
+            this.name = name;
+            this.value = value;
+        }
+
+        @Override
+        public int compareTo(NameAndValue o) {
+            if (this.value > o.getValue())
+                return 1;
+            else if (this.value.equals(o.getValue()))
+                return 0;
+            else
+                return -1;
+        }
+
+        public Float getValue() {
+            return value;
+        }
+
+        public String getName() {
+            return name;
+        }
+
+        @Override
+        public String toString() {
+            return "NameAndValue{" +
+                    "name='" + name + '\'' +
+                    ", value=" + value +
+                    '}';
+        }
     }
 
     /**
      * 打包模型输出结果给调用者
-     * 
+     *
      * @param predict 模型输出
      * @return
      */
     public Map<String, Float> wrap(float[][] predict) {
+        if (this.doFilterDiagnosis)  // 过滤疾病
+        {
+            return this.wrapAndFilterWithPushCountFilter(predict);
+        } else
+            return this.basicWrap(predict);
+    }
+
+
+    /**
+     * 打包模型输出结果给调用者
+     *
+     * @param predict 模型输出
+     * @return
+     */
+    public Map<String, Float> basicWrap(float[][] predict) {
         Map<String, Float> result = new HashMap<>();
-        for (int i=0; i<predict[0].length; i++) {  // 只返回一维向量
+        for (int i = 0; i < predict[0].length; i++) {  // 只返回一维向量
             result.put(this.LABEL_DICT_ARRAY[i], predict[0][i]);
         }
         return result;
@@ -80,10 +347,38 @@ public abstract class NNDataSet {
     }
 
     /**
-     * @return
+     * 存储特征名称
+     *
+     * @param features
      */
+    public void storeFeatureNames(Map<String, Map<String, String>> features) {
+//        this.FEATURE_NAME_STORE.size();  // this.FEATURE_NAME_STORE.clear() 未知原因会出现数据越界异常,加了这个则没有了
+//        this.FEATURE_NAME_STORE.clear();
+        this.FEATURE_NAME_STORE = new ArrayList<>();
+        this.FEATURE_NAME_STORE.addAll(features.keySet());
+    }
+
    /** @return the number of output labels (size of the label dictionary). */
    public int getNumLabel() {
        return this.NUM_LABEL;
    }
 
+
    /** Enables/disables the diagnosis-filtering branch used by {@link #wrap}. */
    public void setDoFilterDiagnosis(boolean doFilterDiagnosis) {
        this.doFilterDiagnosis = doFilterDiagnosis;
    }
+
+
    /** @return the maximum sequence length used for char/position id padding. */
    public int getMAX_LEN() {
        return MAX_LEN;
    }
+
+
    // Presumably toggles whether the model also receives sequence (char/position id)
    // inputs — usage not visible in this file; confirm at call sites.
    public void setWithSequenceInputs(boolean withSequenceInputs) {
        this.withSequenceInputs = withSequenceInputs;
    }
+
+
    /** @return the current sequence-inputs flag (see setWithSequenceInputs). */
    public boolean isWithSequenceInputs() {
        return withSequenceInputs;
    }
 }

+ 185 - 74
algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSetImpl.java

@@ -3,14 +3,15 @@ package org.algorithm.core.neural.dataset;
 import org.algorithm.util.TextFileReader;
 import org.diagbot.pub.utils.PropertiesUtil;
 
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.*;
 import java.util.Map.Entry;
 
 /**
  * 门诊诊断推送用数据集
- * 
+ *
  * @Author: bijl
  * @Date: 2018年7月26日-上午10:19:43
  * @Description:
@@ -22,9 +23,13 @@ public class NNDataSetImpl extends NNDataSet {
         super(modelAndVersion);
     }
 
-
     @Override
     public float[] toFeatureVector(Map<String, Map<String, String>> inputs) {
+
+        // 新添加的操作
+        this.reSplitWord(inputs);  // 再分词
+        this.storeFeatureNames(inputs);  // 保存特征名称
+
         float[] featureVector = new float[this.NUM_FEATURE];
 
         Iterator<Entry<String, Map<String, String>>> entries = inputs.entrySet().iterator();
@@ -32,13 +37,9 @@ public class NNDataSetImpl extends NNDataSet {
         String featureName = "";
         Integer position = -1;
         Integer negative = 0;
-        // Integer partbodyValue = 0;
         float positive_value = 1.0f;
         float negative_value = -1.0f;
         Map<String, String> featureValues = null;
-        // String partbody = null;
-        // String[] partbodys = null;
-        // String sn = null;
 
         /**
          * 数据方案设计
@@ -51,11 +52,6 @@ public class NNDataSetImpl extends NNDataSet {
             featureValues = entry.getValue();
             position = this.FEATURE_DICT.get(featureName);
             negative = NEGATIVE_DICT.get(featureValues.get("negative"));
-            // 突出主症状的数据方案
-            // sn = featureValues.get("sn");
-            // if("0".equals(sn)) {
-            // negative = negative * 10;
-            // }
 
             if (position != null)
                 if (negative == 1)
@@ -65,91 +61,72 @@ public class NNDataSetImpl extends NNDataSet {
                 else
                     System.out.println("New Nagetive! This may lead to an error.");
 
+        }
 
+        return featureVector;
+    }
 
-            /**
-             * 部位附属症状数据表示方案 partbodyValue = this.PARTBODY_DICT.get(featureValues.get("partbody"));
-             * if(partbodyValue != null) { value = 1.0f * partbodyValue /
-             * this.PARTBODY_DICT.get("NULL"); // 部位值表示 value = (float)(Math.round(value *
-             * 100000))/100000; // 保留5位有效数字 } value = negative * value; featureVector[position] =
-             * value;
-             * 
-             */
-
+    @Override
+    public int[] toCharIds(Map<String, Map<String, String>> inputs) {
+        String sentence = inputs.get("sentence").get("sentence");
+        int max_len = this.getMAX_LEN();
+        int[] ids = new int[max_len];
+        char ch = '1';
+        Integer id = null;
+        for (int i = 0; i < sentence.length() && i < max_len; i++) {  // 不超过最大长度
+            ch = sentence.charAt(i);
+            id = this.CHAR2ID_DICT.get(String.valueOf(ch));
+            if (id == null) {
+                id = this.CHAR2ID_DICT.get("<UNC>");
+            }
+            ids[i] = id;
         }
+        for (int i = sentence.length(); i < max_len; i++)  // padding
+            ids[i] = this.CHAR2ID_DICT.get("<PAD>");
 
-        return featureVector;
+        return ids;
     }
 
+    @Override
+    public int[] toPosIds(Map<String, Map<String, String>> inputs) {
+        int max_len = this.getMAX_LEN();
+        String sentence = inputs.get("sentence").get("sentence");
+        int[] pos_ids = new int[max_len];
+        for (int j=0; j<max_len; j++)
+            pos_ids[j] = max_len - 1;  // 位置的padding
 
-    /**
-     * 读取字典
-     */
-//     @Override
-//     public void readDict(String modelAndVersion) {
-//    
-//     PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
-//     String model_version = prop.getProperty(modelAndVersion);
-//     model_version = model_version.trim();
-//    
-//     String url = "jdbc:mysql://192.168.2.235/diagbot-app?user=root&password=diagbot@20180822";
-//     MysqlConnector connector = new MysqlConnector(url);
-//     String querySql = "SELECT md._name, md._index, md.type_id " + "FROM model_dictionary AS md "
-//     + "WHERE md.belong_model = 'outpatient_model'";
-//    
-//     querySql = querySql.replace("outpatient_model", model_version);
-//     ResultSet rs = connector.query(querySql);
-//     try {
-//     while (rs.next()) {
-//     int type_id = rs.getInt("type_id");
-//     int _index = rs.getInt("_index");
-//     String _name = rs.getString("_name");
-//    
-//     if (type_id == 1)
-//     this.FEATURE_DICT.put(_name, _index);
-//     else if (type_id == 2)
-//     this.LABEL_DICT.put(_name, _index);
-//     else if (type_id == 8)
-//     this.NEGATIVE_DICT.put(_name, _index);
-//    
-//     }
-//    
-//     System.out.println("feature size:"+this.FEATURE_DICT.size());
-//    
-//     } catch (SQLException e) {
-//     e.printStackTrace();
-//     throw new RuntimeException("加载特征和类别字典失败");
-//     } finally {
-//     connector.close();
-//     }
-//    
-//     }
+        // 绝对位置编码
+        for (int i = 0 ; i < (sentence.length() < max_len ? sentence.length() : max_len); i++)
+            pos_ids[i] = i;
+
+        return pos_ids;
+    }
 
     @Override
     public void readDict(String modelAndVersion) {
-        
+
         PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
         String model_version = prop.getProperty(modelAndVersion);
 
         String filePath = prop.getProperty("basicPath");  // 基本目录
         filePath = filePath.substring(0, filePath.indexOf("model_version_replacement"));
-        
+
         filePath = filePath + "dictionaries.bin";  // 字典文件位置
-        
+
         List<String> lines = TextFileReader.readLines(filePath);
 
         boolean firstLine = true;
-        
+
         String[] temp = null;
         for (String line : lines) {
             if (firstLine) {  // 去除第一行
                 firstLine = false;
                 continue;
             }
-            
+
             temp = line.split("\\|");
-            
-            if(temp[3].equals(model_version)){
+
+            if (temp[3].equals(model_version)) {
                 int type_id = Integer.parseInt(temp[2]);
                 int _index = Integer.parseInt(temp[1]);
                 String _name = temp[0];
@@ -164,8 +141,142 @@ public class NNDataSetImpl extends NNDataSet {
 
         }
 
-        System.out.println("feature size:" + this.FEATURE_DICT.size());
+//        System.out.println("feature size:" + this.FEATURE_DICT.size());
+
+    }
+
+    @Override
+    public void readChar2IdDict(String modelAndVersion) {
+
+        // 获取文件目录
+        PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
+        String filePath = prop.getProperty("basicPath");  // 基本目录
+        filePath = filePath.substring(0, filePath.indexOf("model_version_replacement"));
+
+        filePath = filePath + "char2id.bin";  // 字典文件位置
+
+        // 读取以json字符串保存的数据
+        BufferedReader br = null;
+        try {
+            br = new BufferedReader(new FileReader(filePath));  // 读取原始json文件
+            String line = null;
+            String[] pair = null;
+            while ((line = br.readLine()) != null) {
+                line = line.trim();
+                if (line.indexOf("_|_") > -1){
+                    pair = line.split("_\\|_");
+                    this.CHAR2ID_DICT.put(pair[0], Integer.parseInt(pair[1]));
+                }
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            try {
+                br.close();
+            } catch (IOException e) {
+                e.printStackTrace();
+            }
+        }
+
+    }
+
+    /**
+     * 再分词:
+     * 基本操作:
+     * 如果再分词表中有某一词项,则移除它,并添加该此项对应的细分词项
+     *
+     * @param inputs 输入
+     */
+    public void reSplitWord(Map<String, Map<String, String>> inputs) {
+        Iterator<Entry<String, Map<String, String>>> entries = inputs.entrySet().iterator();
+
+        String featureName = "";
+        String[] splitWords = null;
+        Map<String, String> featureValues = null;
+        Entry<String, Map<String, String>> entry;
+
+        Map<String, Map<String, String>> tempHashMap = new HashMap<>();  // 用于暂存key, value
+
+        while (entries.hasNext()) {
+            entry = entries.next();
+            featureName = entry.getKey();
+            if (this.FEATURE_DICT.get(featureName) == null  // 特征字典中没有然后再分词
+                    && this.RE_SPLIT_WORD_DICT.get(featureName) != null) {
+                entries.remove();  // 移除该词项
+                splitWords = this.RE_SPLIT_WORD_DICT.get(featureName).split(",");
+                for (String word : splitWords) {  // 添加细分词项
+                    featureValues = new HashMap<>();
+                    featureValues.put("negative", "有"); // 设置为阳性词
+                    tempHashMap.put(word, featureValues);
+                }
+
+            }
+        }
+
+        inputs.putAll(tempHashMap);
+    }
+
+    @Override
+    public void readReSplitWordDict() {
+        PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
+        String filePath = prop.getProperty("basicPath");  // 基本目录
+        filePath = filePath.substring(0, filePath.indexOf("model_version_replacement"));
+
+        filePath = filePath + "re_split_word.bin";  // 字典文件位置
+
+        List<String> lines = TextFileReader.readLines(filePath);
+
+        boolean firstLine = true;
+
+        String[] temp = null;
+        Map<String, String> feature_map = null;
+        for (String line : lines) {
+            if (firstLine) {  // 去除第一行
+                firstLine = false;
+                continue;
+            }
+
+            temp = line.split("\\|");
+
+            this.RE_SPLIT_WORD_DICT.put(temp[0], temp[1]);
+
+        }
+
+//        System.out.println("再分词,词条数:" + this.RE_SPLIT_WORD_DICT.size());
+
+    }
+
+    @Override
+    public void readFilterDiagnosisDict() {
+        PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
+        String filePath = prop.getProperty("basicPath");  // 基本目录
+        filePath = filePath.substring(0, filePath.indexOf("model_version_replacement"));
+
+        filePath = filePath + "filter_diagnoses.bin";  // 字典文件位置
+
+        List<String> lines = TextFileReader.readLines(filePath);
+
+        boolean firstLine = true;
+
+        String[] temp = null;
+        String[] diagnoses = null;
+        Map<String, Integer> diagnosis_map = null;
+        for (String line : lines) {
+            if (firstLine) {  // 去除第一行
+                firstLine = false;
+                continue;
+            }
+
+            temp = line.split("\\|");
+            diagnoses = temp[1].split("_");
+            diagnosis_map = new HashMap<>();
+            for (String diagnosis: diagnoses)
+                diagnosis_map.put(diagnosis, 1);
+            this.RELATED_DIAGNOSIS_DICT.put(temp[0], diagnosis_map);
+        }
 
+//        System.out.println("疾病过滤字典大小:" + this.RELATED_DIAGNOSIS_DICT.size());
     }
 
+
 }

+ 26 - 1
algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSetImplNonParallel.java

@@ -22,7 +22,17 @@ public class NNDataSetImplNonParallel extends NNDataSet {
         super(modelAndVersion);
     }
 
-    
+
    @Override
    public void readReSplitWordDict() {
        // No-op: this data set variant does not use a re-split dictionary.
    }
+
    @Override
    public void readFilterDiagnosisDict() {
        // No-op: this data set variant does not use a diagnosis-filter dictionary.
    }
+
     @Override
     public float[] toFeatureVector(Map<String, Map<String, String>> inputs) {
         // inputs {症状名:{partbody:部位名, property:属性名, duration:时间类别, sex:性别值, age:年龄值}
@@ -122,6 +132,16 @@ public class NNDataSetImplNonParallel extends NNDataSet {
         return results;
     }
 
    @Override
    public int[] toCharIds(Map<String, Map<String, String>> inputs) {
        // Sequence inputs are not supported by this variant; return an empty id array.
        return new int[0];
    }
+
    @Override
    public int[] toPosIds(Map<String, Map<String, String>> inputs) {
        // Sequence inputs are not supported by this variant; return an empty id array.
        return new int[0];
    }
+
 
     /**
      * 读取字典
@@ -176,4 +196,9 @@ public class NNDataSetImplNonParallel extends NNDataSet {
 
     }
 
    @Override
    public void readChar2IdDict(String modelAndVersion) {
        // No-op: this data set variant has no char-to-id dictionary.
    }
+
 }

+ 33 - 0
algorithm/src/main/java/org/algorithm/factory/RelationExtractionFactory.java

@@ -0,0 +1,33 @@
+package org.algorithm.factory;
+
+import org.algorithm.core.cnn.AlgorithmCNNExecutorPacs;
+import org.algorithm.core.cnn.model.RelationExtractionEnsembleModel;
+
+/**
+ * @Description:
+ * @Author: HUJING
+ * @Date: 2019/9/10 15:25
+ */
+public class RelationExtractionFactory {
+    private static RelationExtractionEnsembleModel relationExtractionEnsembleModelInstance = null;
+
+    public static AlgorithmCNNExecutorPacs getInstance() {
+        try {
+            relationExtractionEnsembleModelInstance = (RelationExtractionEnsembleModel) create(relationExtractionEnsembleModelInstance, RelationExtractionEnsembleModel.class);
+        } catch (InstantiationException inst) {
+            inst.printStackTrace();
+        } catch (IllegalAccessException ille) {
+            ille.printStackTrace();
+        }
+        return relationExtractionEnsembleModelInstance;
+    }
+
+    private static Object create(Object obj, Class cls) throws InstantiationException, IllegalAccessException {
+        if (obj == null) {
+            synchronized (cls) {
+                obj = cls.newInstance();
+            }
+        }
+        return obj;
+    }
+}

+ 5 - 3
algorithm/src/main/java/org/algorithm/test/ReEnsembleModelTest.java

@@ -18,7 +18,7 @@ public class ReEnsembleModelTest {
 
     public static void main(String[] args) {
         RelationExtractionEnsembleModel ensembleModel = new RelationExtractionEnsembleModel();
-
+        List<List<String>> result = new ArrayList<>();
         List<Triad> triads = new ArrayList<>();
         Triad triad_1 = new Triad();
         Lemma l_1 = new Lemma();
@@ -36,9 +36,11 @@ public class ReEnsembleModelTest {
 
         long start = System.nanoTime();
         for (int i=0; i<200; i++)  // 重复100次
-            triads = ensembleModel.execute("患者剧烈胸痛头痛失眠不安", triads);
+        {
+            result = ensembleModel.execute("患者剧烈胸痛头痛失眠不安", triads);
+        }
         long elapsedTime = System.nanoTime() - start;
-        System.out.println(triads.size());
+        System.out.println(result.size());
         System.out.println(elapsedTime);
     }
 }

+ 9 - 4
algorithm/src/main/java/org/algorithm/test/TensorflowExcutorTest.java

@@ -1,5 +1,6 @@
 package org.algorithm.test;
 
+import org.algorithm.core.neural.DiagnosisPredictExecutor;
 import org.algorithm.core.neural.SymptomPredictExecutor;
 import org.algorithm.util.Utils;
 
@@ -13,9 +14,9 @@ public class TensorflowExcutorTest {
         
         //TODO:change VitalPredictExcutor to test different executors
 //        VitalPredictExecutor excutor = new VitalPredictExecutor();
-        SymptomPredictExecutor excutor = new SymptomPredictExecutor();
+//        SymptomPredictExecutor excutor = new SymptomPredictExecutor();
 //        LisPredictExecutor excutor = new LisPredictExecutor();
-//        DiagnosisPredictExecutor excutor = new DiagnosisPredictExecutor();
+        DiagnosisPredictExecutor excutor = new DiagnosisPredictExecutor();
 //        PacsPredictExecutor excutor = new PacsPredictExecutor();
 //        DiagnosisToLisExecutor excutor = new DiagnosisToLisExecutor();
 //        DiagnosisToPacsExecutor excutor = new DiagnosisToPacsExecutor();
@@ -75,7 +76,11 @@ public class TensorflowExcutorTest {
         featureValues.put("age", "34");
         featureValues.put("negative", "有");
         featureValues.put("sn", "0");
-        aMap.put("踝关节疼痛", featureValues);
+
+        aMap.put("上臂远端疼痛", featureValues);
+        aMap.put("上肢远端青紫", featureValues);
+        aMap.put("肘部肿胀", featureValues);
+        aMap.put("外伤", featureValues);
 //        aMap.put("心悸", featureValues);
 //        aMap.put("气急", featureValues);
 //        aMap.put("头痛", featureValues);
@@ -87,7 +92,7 @@ public class TensorflowExcutorTest {
 //        for (Entry<String, Float> entry : result.entrySet()) {
 //            System.out.println(entry.getKey() + " : " + entry.getValue());
 //        }
-//        System.out.println(result);
+        System.out.println(result);
         Utils.top_k(10, result);
 
     }

+ 24 - 37
algorithm/src/main/java/org/algorithm/test/Test.java

@@ -1,48 +1,35 @@
 package org.algorithm.test;
 
 
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
 public class Test {
     
     public static void main(String[] args) {
-        
-//        Integer aa = new Integer(53);
-//        Integer bb = new Integer(954);
-//        float xx = 1.0f;
-//        for(int i=1; i< 955; i++) {
-//            xx = (float)(Math.round(1.0f * i / bb*100000))/100000;
-//            System.out.println(i+":"+xx);
-////        }
-//        String filePath = "/opt/models/model_version_replacement/model";
-//        int index = filePath.indexOf("model_version_replacement");
-//
-//        System.out.println(filePath.substring(0, index));
-//            public static void testJSONStrToJavaBeanObj(){
-//
-//        Student student = JSON.parseObject(JSON_OBJ_STR, new TypeReference<Student>() {});
-//        //Student student1 = JSONObject.parseObject(JSON_OBJ_STR, new TypeReference<Student>() {});//因为JSONObject继承了JSON,所以这样也是可以的
-//
-//        System.out.println(student.getStudentName()+":"+student.getStudentAge());
-//
-        String JSON_ARRAY_STR = "[{\"length\":4,\"offset\":0,\"property\":\"1\",\"text\":\"剑突下痛\",\"threshold\":0.0},{\"length\":2,\"offset\":4,\"property\":\"1\",\"text\":\"胀痛\",\"threshold\":0.0},{\"length\":2,\"offset\":6,\"property\":\"2\",\"text\":\"1天\",\"threshold\":0.0},{\"length\":1,\"offset\":8,\"text\":\",\",\"threshold\":0.0}]\n";
-//        JSONArray jsonArray = JSONArray.parseArray(JSON_ARRAY_STR);
-////        String jsonString = "{\"length\":4,\"offset\":0,\"property\":\"1\",\"text\":\"剑突下痛\",\"threshold\":0.0}";
-//
-//       for (int i = 0; i < jsonArray.size(); i++){
-//           JSONObject job = jsonArray.getJSONObject(i);
-//           LemmaInfo info = JSON.parseObject(job.toJSONString(), new TypeReference<LemmaInfo>() {});
-//           //Student student1 = JSONObject.parseObject(JSON_OBJ_STR, new TypeReference<Student>() {});//因为JSONObject继承了JSON,所以这样也是可以的
-//
-//           System.out.println(info.getLength()+":"+info.getText());
-//       }
-
-        int index = 0;
-        for (int i=0; i<5; i++)
-            for (int j = i+1; j< 6; j++){
-                System.out.println(i + "," + j);
-                index ++;
+
+        List<String> aList = new ArrayList<>();
+        aList.add("del");
+        aList.add("del");
+        aList.add("xx");
+        aList.add("yy");
+
+        Iterator<String> it = aList.iterator();
+        boolean xx = false;
+        while(it.hasNext()){
+            String x = it.next();
+            if (!xx){
+
+                if (x.equals("xx"))
+                    xx = true;
+            }
+            if(xx){
+                it.remove();
             }
+        }
 
-        System.out.println(index);
+        System.out.println(aList);
 
     }
 

+ 46 - 0
algorithm/src/main/java/org/algorithm/test/TestDiagnosisFilter.java

@@ -0,0 +1,46 @@
package org.algorithm.test;

import org.algorithm.core.neural.dataset.NNDataSetImpl;

import java.util.HashMap;
import java.util.Map;

/**
 * Manual smoke test for the diagnosis filter of NNDataSetImpl:
 * compares the filtered wrapper (wrapAndFilter) against the plain one (basicWrap).
 *
 * @Author: bijl
 * @Date: 2019/9/23 10:49
 * @Description:
 */
public class TestDiagnosisFilter {

    public static void main(String[] args) {
        NNDataSetImpl dataSet = new NNDataSetImpl("diagnosisPredict.version");

        dataSet.readFilterDiagnosisDict();  // load the diagnosis-filter table
//        鼻炎|0|2|outpatient_556_IOE_1
//        肺癌|1|2|outpatient_556_IOE_1
//        胃肠炎|2|2|outpatient_556_IOE_1
//        屈光不正|3|2|outpatient_556_IOE_1
        // How to build: look up the diseases related to outpatient_556_IOE_1 in dictionaries.bin, as shown above
        float[][] predict = {{0.1f, 0.2f, 0.3f, 0.4f}};

        // Build the input
        Map<String, Map<String, String>> inputs = new HashMap<>();
        Map<String, String> featureMap = new HashMap<>();
        featureMap.put("negative", "有");
        featureMap.put("property", "11");

        // How to build: pick from filter_diagnoses.bin one or more features related to the diseases above, then add them
        inputs.put("上腹压痛", featureMap);  // "上腹压痛" co-occurs only with "胃肠炎"
        // Store the input feature names
        dataSet.storeFeatureNames(inputs);

        // Filter diagnoses
        Map<String, Float> result = dataSet.wrapAndFilter(predict);
        Map<String, Float> result_no_filter = dataSet.basicWrap(predict);

        System.out.println("无疾病过滤:" + result_no_filter);  // expected output: {鼻炎=0.1, 肺癌=0.2, 胃肠炎=0.3, 屈光不正=0.4}
        System.out.println("疾病过滤:" + result);  // expected output: {胃肠炎=0.3}


    }
}

+ 34 - 0
algorithm/src/main/java/org/algorithm/test/TestReSplit.java

@@ -0,0 +1,34 @@
package org.algorithm.test;

import org.algorithm.core.neural.dataset.NNDataSetImpl;

import java.util.HashMap;
import java.util.Map;

/**
 * Manual test for the re-segmentation (reSplitWord) step of NNDataSetImpl:
 * prints the input feature map before and after re-splitting.
 * @Author: bijl
 * @Date: 2019/9/23 10:46
 * @Description:
 */
public class TestReSplit {

    public static void main(String[] args) {

        NNDataSetImpl dataSet = new NNDataSetImpl("diagnosisPredict.version");

        // Build the input
        Map<String, Map<String, String>> inputs = new HashMap<>();

        Map<String, String> featureMap = new HashMap<>();
        featureMap.put("negative", "有");
        featureMap.put("property", "11");

        inputs.put("幽门螺杆菌感染", featureMap);

        // Compare the data before and after re-segmentation
        System.out.println("原来数据:" + inputs);
        dataSet.reSplitWord(inputs);
        System.out.println("再分词后数据:" + inputs);
    }
}

+ 15 - 0
algorithm/src/main/java/org/algorithm/test/TestRelationTreeUtils.java

@@ -0,0 +1,15 @@
package org.algorithm.test;

import org.algorithm.core.RelationTreeUtils;

/**
 * Entry point that delegates to RelationTreeUtils' built-in self-test.
 * @Author: bijl
 * @Date: 2019/9/5 17:07
 * @Description:
 */
public class TestRelationTreeUtils {

    public static void main(String[] args) {
        RelationTreeUtils.test();
    }
}

File diff suppressed because it is too large
+ 140 - 0
algorithm/src/main/java/org/algorithm/test/TestRuleCheckMachine.java


+ 1 - 1
algorithm/src/main/java/org/algorithm/util/MysqlConnector.java

@@ -45,7 +45,7 @@ public class MysqlConnector {
     
     /**
      * 执行sql语句
-     * @param sql
+     * @param sqls
      */
     public void executeBatch(List<String> sqls) {
         Statement stmt = null;

+ 1 - 1
algorithm/src/main/resources/algorithm.properties

@@ -2,7 +2,7 @@
 
 #basicPath=E:/project/push/algorithm/src/main/models/model_version_replacement/model
 basicPath=/opt/models/dev/models/model_version_replacement/model
-#basicPath=E:/xxx/model_version_replacement/model
+#basicPath=E:/models_2019_9_24_16_21_29/model_version_replacement/model
 
 ############################### current model version ################################
 diagnosisPredict.version=outpatient_556_IOE_1

+ 12 - 0
bigdata-web/pom.xml

@@ -43,6 +43,12 @@
             <version>1.0.0</version>
         </dependency>
 
+		<dependency>
+			<groupId>org.diagbot</groupId>
+			<artifactId>common-push</artifactId>
+			<version>1.0.0</version>
+		</dependency>
+
         <dependency>
             <groupId>org.diagbot</groupId>
             <artifactId>common-service</artifactId>
@@ -71,6 +77,12 @@
             <version>1.2.5</version>
         </dependency>
 
+		<dependency>
+			<groupId>net.sourceforge.jexcelapi</groupId>
+			<artifactId>jxl</artifactId>
+			<version>2.6.12</version>
+		</dependency>
+
 		<dependency>
 			<groupId>mysql</groupId>
 			<artifactId>mysql-connector-java</artifactId>

+ 0 - 285
bigdata-web/src/main/java/org/diagbot/bigdata/common/ApplicationCacheUtil.java

@@ -1,285 +0,0 @@
-package org.diagbot.bigdata.common;
-
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-import org.diagbot.bigdata.util.BigDataConstants;
-import org.diagbot.common.javabean.Rule;
-import org.diagbot.nlp.participle.ParticipleUtil;
-import org.diagbot.nlp.participle.cfg.Configuration;
-import org.diagbot.nlp.participle.cfg.DefaultConfig;
-import org.diagbot.nlp.participle.word.Lexeme;
-import org.diagbot.nlp.participle.word.LexemePath;
-import org.diagbot.nlp.util.NegativeEnum;
-import org.diagbot.nlp.util.NlpCache;
-import org.diagbot.pub.utils.security.EncrypDES;
-
-import javax.servlet.ServletContext;
-import java.util.*;
-
-public class ApplicationCacheUtil {
-
-    //词库同义词定义
-    public static Map<String, Map<String, String>> standard_info_synonym_map = null;
-    //词库大小类定义
-    public static Map<String, String> standard_info_classify_map = null;
-    //树形结构存储大小类
-    public static Map<String, NlpCache.Node> standard_info_type_tree_map = null;
-//    体征衍射
-//    public static Map<String, String> doc_result_mapping_vital_map = null;
-    //诊断科室衍射
-    public static Map<String, String> doc_result_mapping_diag_map = null;
-    //特征性别 年龄过滤等
-    public static Map<String, Map<String, ResultMappingFilter>> doc_result_mapping_filter_map = null;
-    //诊断依据标准词
-    public static Map<String, List<Map<String, String>>> kl_result_mapping_standword_map = null;
-    // 规则
-    public static Map<String, List<Rule>> rule_filter_map = null;
-    public static Map<String, List<Rule>> kl_rule_filter_map = null;
-
-
-    public static Map<String, Map<String, String>> getStandard_info_synonym_map() {
-        if (standard_info_synonym_map == null) {
-            standard_info_synonym_map = NlpCache.getStandard_info_synonym_map();
-        }
-        return standard_info_synonym_map;
-    }
-
-    public static Map<String, String> getStandard_info_classify_map() {
-        if (standard_info_classify_map == null) {
-            standard_info_classify_map = NlpCache.getStandard_info_classify_map();
-        }
-        return standard_info_classify_map;
-    }
-
-    public static Map<String, NlpCache.Node> getStandard_info_type_tree_map() {
-        if (standard_info_type_tree_map == null) {
-            standard_info_type_tree_map = NlpCache.getStandard_info_type_tree_map();
-        }
-        return standard_info_type_tree_map;
-    }
-
-//    /**
-//     * 現已無用
-//     * @return
-//     */
-//    public static Map<String, String> getDoc_result_mapping_vital_map() {
-//        if (doc_result_mapping_vital_map == null) {
-//            Configuration configuration = new DefaultConfig();
-//            doc_result_mapping_vital_map = configuration.loadMapDict("doc_result_mapping_vital.dict");
-//        }
-//        return doc_result_mapping_vital_map;
-//    }
-
-    public static Map<String, String> getDoc_result_mapping_diag_map() {
-        if (doc_result_mapping_diag_map == null) {
-            createDoc_result_mapping_diag_map();
-        }
-        return doc_result_mapping_diag_map;
-    }
-
-    public static Map<String, String> createDoc_result_mapping_diag_map() {
-        Configuration configuration = new DefaultConfig();
-        doc_result_mapping_diag_map = configuration.loadMapDict("bigdata_diag_2_dept.dict");
-        return doc_result_mapping_diag_map;
-    }
-
-    public static Map<String, Map<String, ResultMappingFilter>> getDoc_result_mapping_filter_map() {
-        if (doc_result_mapping_filter_map == null) {
-            createDoc_result_mapping_filter_map();
-        }
-        return doc_result_mapping_filter_map;
-    }
-
-    public static Map<String, Map<String, ResultMappingFilter>> createDoc_result_mapping_filter_map() {
-        Configuration configuration = new DefaultConfig();
-        List<String> fileContents = configuration.readFileContents("bigdata_sex_age_filter.dict");
-        String[] line_string;
-        List<ResultMappingFilter> resultMappingFilters = new ArrayList<>();
-        try {
-            for (int i = 0; i < fileContents.size(); i++) {
-                line_string = org.apache.commons.lang3.StringUtils.split(fileContents.get(i), "\\|");
-                if (line_string.length == 5) {
-                    ResultMappingFilter resultMappingFilter = new ResultMappingFilter();
-                    resultMappingFilter.setFeatureName(line_string[0]);
-                    resultMappingFilter.setFeatureType(line_string[1]);
-                    resultMappingFilter.setSex(line_string[2]);
-                    resultMappingFilter.setAgeStart(Integer.parseInt(line_string[3]));
-                    resultMappingFilter.setAgeEnd(Integer.parseInt(line_string[4]));
-                    resultMappingFilters.add(resultMappingFilter);
-                }
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        doc_result_mapping_filter_map = new HashMap<>();
-        Map<String, ResultMappingFilter> filterMap = null;
-        for (ResultMappingFilter resultMappingFilter : resultMappingFilters) {
-            filterMap = doc_result_mapping_filter_map.get(resultMappingFilter.getFeatureType());
-            if (filterMap == null) {
-                filterMap = new HashMap<>();
-            }
-            filterMap.put(resultMappingFilter.getFeatureName(), resultMappingFilter);
-            doc_result_mapping_filter_map.put(resultMappingFilter.getFeatureType(), filterMap);
-        }
-        return doc_result_mapping_filter_map;
-    }
-
-    public static Map<String, List<Map<String, String>>> getKl_result_mapping_standword_map() {
-        if (kl_result_mapping_standword_map == null) {
-            createKl_result_mapping_standword_map();
-        }
-        return kl_result_mapping_standword_map;
-    }
-
-    public static Map<String, List<Map<String, String>>> createKl_result_mapping_standword_map() {
-        kl_result_mapping_standword_map = new HashMap<>();
-        Configuration configuration = new DefaultConfig();
-        List<String> fileContents = configuration.readFileContents("bigdata_value_analyze.dict");
-        List<Map<String, String>> standWordObjValList = null;
-        Map<String, String> standWordObjVal = null;
-        String operation = ">=|≥|>|大于|>|超过|<=|≤|<|小于|<|少于";
-        try {
-            for (String fileContent : fileContents) {
-                LexemePath<Lexeme> lexemes = null;
-                String op = "";
-                String[] fileContentSplit = null;
-                //每一个标准词根据大于小于符号切开,不然进行分词时还是会得到原本的标准词
-                if (fileContent.contains(">") || fileContent.contains("大于")
-                        || fileContent.contains(">") || fileContent.contains("超过")) {
-                    op = ">";
-                    fileContentSplit = fileContent.split(operation);
-                } else if (fileContent.contains("<") || fileContent.contains("小于")
-                        || fileContent.contains("<") || fileContent.contains("少于")) {
-                    op = "<";
-                    fileContentSplit = fileContent.split(operation);
-                } else if (fileContent.contains(">=") || fileContent.contains("≥")){
-                    op = ">=";
-                    fileContentSplit = fileContent.split(operation);
-                } else if (fileContent.contains("<=") || fileContent.contains("≤")) {
-                    op = "<=";
-                    fileContentSplit = fileContent.split(operation);
-                } else {
-                    continue;
-                }
-                LexemePath<Lexeme> lexemeWord = null;
-                //每一个标准词切开后进行分词
-                for (String fileContentWords : fileContentSplit) {
-                    lexemeWord = ParticipleUtil.participle(fileContentWords);
-                    if (lexemeWord != null) {
-                        if (null == lexemes) {
-                            lexemes = lexemeWord;
-                        } else {
-                            for (Lexeme lexeme : lexemeWord) {
-                                lexemes.add(lexeme);
-                            }
-                        }
-                    }
-                }
-                String standWordObjKey = "";
-                standWordObjValList = new ArrayList<>();
-                standWordObjVal = new HashMap<>();
-                int i = 0;
-                for (Lexeme lexeme : lexemes) {
-                    i++;
-                    if (lexeme.getProperty().contains(",")) {
-                        setProterty(lexeme); //如果分词后词性有多个,只选一个(暂时只处理症状,体征)
-                    }
-                    NegativeEnum lexemeNegativeEnum = NegativeEnum.parseOfValue(lexeme.getProperty());
-                    if (lexemeNegativeEnum == NegativeEnum.SYMPTOM || lexemeNegativeEnum == NegativeEnum.CAUSE
-                            || lexemeNegativeEnum == NegativeEnum.VITAL_INDEX
-                            || lexemeNegativeEnum == NegativeEnum.DIAG_STAND) {
-                        if (!kl_result_mapping_standword_map.containsKey(lexeme.getText())) {
-                            kl_result_mapping_standword_map.put(lexeme.getText(), standWordObjValList);
-                        } else {
-                            standWordObjKey = lexeme.getText();
-                        }
-                    }
-                    if (lexemeNegativeEnum == NegativeEnum.DIGITS) {
-                        standWordObjVal.put("value", lexeme.getText());
-                    }
-                    if (lexemeNegativeEnum == NegativeEnum.UNIT
-                            || lexemeNegativeEnum == NegativeEnum.EVENT_TIME
-                            || lexemeNegativeEnum == NegativeEnum.OTHER) {
-                        standWordObjVal.put("unit", lexeme.getText().toLowerCase());
-                    }
-                    if (lexemes.size() == i) {
-                        standWordObjVal.put("op", op);
-                        standWordObjVal.put("standword", fileContent);
-                        if (kl_result_mapping_standword_map.containsKey(standWordObjKey)) {
-                            kl_result_mapping_standword_map.get(standWordObjKey).add(standWordObjVal);
-                        } else {
-                            standWordObjValList.add(standWordObjVal);
-                        }
-                    }
-                }
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-        return kl_result_mapping_standword_map;
-    }
-
-    public static Map<String, List<Rule>> get_rule_filter_map() {
-        if (rule_filter_map == null || rule_filter_map.size() == 0) {
-            create_rule_filter_map();
-        }
-        return rule_filter_map;
-    }
-
-    public static void create_rule_filter_map() {
-        rule_filter_map = new HashMap<>();
-        List<Rule> rulelist;
-        Map<String, String> rule;
-        String key;
-
-        String[] labels = {"set_name","idx_name","min_operator","min_value","min_unit",
-                "max_operator","max_value","max_unit","eq_operator","eq_value","eq_unit","remind"};
-
-        Configuration configuration = new DefaultConfig();
-        List<String> fileContents = configuration.readFileContents("rule_filter.dict");
-
-        for (String line:fileContents) {
-            rule = new HashMap<>();
-            String[] content = line.split("\\|");
-            if (labels.length == content.length) {
-//                for (int i=0; i<labels.length; i++) {
-//                    rule.put(labels[i], content[i]);
-//                }
-//                key = content[0] + "--" + content[1];
-                key = content[1];
-                if (rule_filter_map.get(key) == null) {
-                    rule_filter_map.put(key, new ArrayList<>());
-                }
-                rulelist = rule_filter_map.get(key);
-                rulelist.add(new Rule(content));
-            }
-        }
-    }
-
-    public static void create_kl_rule_filter_map() {
-        kl_rule_filter_map = new HashMap<>();
-
-        Configuration configuration = new DefaultConfig();
-        List<String> fileContents = configuration.readFileContents("bigdata_rule_filter.dict");
-        for (String line:fileContents) {
-            String[] content = line.split("\\|");
-        }
-    }
-
-    public static void setProterty(Lexeme lexeme) {
-        for (String featureType : lexeme.getProperty().split(",")) {
-            switch (featureType) {
-                case "1":
-                    lexeme.setProperty("1");
-                    break;
-                case "33":
-                    lexeme.setProperty("33");
-                    break;
-                case "70":
-                    lexeme.setProperty("70");
-                    break;
-
-            }
-        }
-    }
-}

+ 0 - 98
bigdata-web/src/main/java/org/diagbot/bigdata/common/InitListener.java

@@ -1,98 +0,0 @@
-package org.diagbot.bigdata.common;
-
-
-import org.diagbot.bigdata.dao.model.ResultMappingDiag;
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-import org.diagbot.bigdata.dao.model.ResultMappingVital;
-import org.diagbot.bigdata.service.ResultMappingDiagService;
-import org.diagbot.bigdata.service.ResultMappingFilterService;
-import org.diagbot.bigdata.service.ResultMappingVitalService;
-import org.diagbot.bigdata.util.BigDataConstants;
-import org.diagbot.nlp.participle.cfg.Configuration;
-import org.diagbot.nlp.participle.cfg.DefaultConfig;
-import org.springframework.beans.factory.annotation.Autowired;
-
-import javax.servlet.ServletContextEvent;
-import javax.servlet.ServletContextListener;
-import javax.servlet.annotation.WebListener;
-import java.util.*;
-
-@WebListener
-public class InitListener implements ServletContextListener {
-    @Autowired
-    ResultMappingVitalService resultMappingVitalService;
-    @Autowired
-    ResultMappingDiagService resultMappingDiagService;
-    @Autowired
-    ResultMappingFilterService resultMappingFilterService;
-
-    public void contextDestroyed(ServletContextEvent arg0) {
-
-    }
-
-    /**
-     * 开始初始化数据
-     *
-     * @return
-     */
-    public void contextInitialized(ServletContextEvent event) {
-//        contextStandardLibraryInitialized(event);
-//        contextFeatureMappingInitialized(event);
-//        contextResultMappingDiagInitialized(event);
-//        contextResultMappingFilterInitialized(event);
-    }
-
-//    public void contextStandardLibraryInitialized(ServletContextEvent event) {
-//        ApplicationCacheUtil applicationCacheUtil = new ApplicationCacheUtil();
-//        applicationCacheUtil.putStandardInfoContext(event.getServletContext());
-//    }
-
-//    public void contextFeatureMappingInitialized(ServletContextEvent event) {
-//        Configuration configuration = new DefaultConfig();
-//        Map<String, String> resultMappingVitals = configuration.loadMapDict("tc.dict");
-////        List<ResultMappingVital> resultMappingVitals = resultMappingVitalService.selectList(new HashMap<>());
-//        Map<String, String> mapping = new HashMap<>();
-//        for (ResultMappingVital resultMappingVital : resultMappingVitals) {
-//            mapping.put(resultMappingVital.getName(), resultMappingVital.getNameMapping());
-//        }
-//        event.getServletContext().setAttribute(BigDataConstants.result_mapping_vital, mapping);
-//    }
-//
-//    public void contextResultMappingDiagInitialized(ServletContextEvent event) {
-//        List<ResultMappingDiag> resultMappingDiags = resultMappingDiagService.selectList(new HashMap<>());
-//
-//        Map<String, String> mapping = new HashMap<>();
-//        for (ResultMappingDiag resultMappingDiag : resultMappingDiags) {
-//            mapping.put(resultMappingDiag.getDiagName(), resultMappingDiag.getDeptName());
-//        }
-//        event.getServletContext().setAttribute(BigDataConstants.result_mapping_diag, mapping);
-//    }
-//
-//    public void contextResultMappingFilterInitialized(ServletContextEvent event) {
-//        List<ResultMappingFilter> resultMappingFilters = resultMappingFilterService.selectList(new HashMap<>());
-//
-//        Map<String, Map<String, ResultMappingFilter>> mapping = new HashMap<>();
-//        Map<String, ResultMappingFilter> filterMap = null;
-//        for (ResultMappingFilter resultMappingFilter : resultMappingFilters) {
-//            filterMap = mapping.get(resultMappingFilter.getFeatureType());
-//            if (filterMap == null) {
-//                filterMap = new HashMap<>();
-//            }
-//            filterMap.put(resultMappingFilter.getFeatureName(), resultMappingFilter);
-//            mapping.put(resultMappingFilter.getFeatureType(), filterMap);
-//        }
-//        event.getServletContext().setAttribute(BigDataConstants.result_mapping_filter, mapping);
-//    }
-
-    private void put(Map<String, List<String>> map, String key, List<String> value, String ele) {
-        if (value == null) {
-            value = new ArrayList<>(Arrays.asList(ele));
-            map.put(key, value);
-        } else {
-            if (!value.contains(ele)) {
-                value.add(ele);
-                map.put(key, value);
-            }
-        }
-    }
-}

+ 2 - 2
bigdata-web/src/main/java/org/diagbot/bigdata/controller/AlgorithmController.java

@@ -1,8 +1,8 @@
 package org.diagbot.bigdata.controller;
 
 import org.diagbot.bigdata.work.AlgorithmCore;
-import org.diagbot.common.work.ResponseData;
-import org.diagbot.common.work.SearchData;
+import org.diagbot.common.push.bean.ResponseData;
+import org.diagbot.common.push.bean.SearchData;
 import org.diagbot.nlp.util.Constants;
 import org.diagbot.pub.api.Response;
 import org.diagbot.pub.web.BaseController;

+ 0 - 22
bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingDiagMapper.java

@@ -1,22 +0,0 @@
-/** 
-* @Company: 杭州朗通信息技术有限公司
-* @Department: 医疗事业部
-* @Description: 互动反馈系统 
-* @Address: 浙江省杭州市余杭区向往街1008号乐富海邦园11幢4楼
-*/
-package org.diagbot.bigdata.dao.mapper;
-
-import org.diagbot.bigdata.dao.model.ResultMappingDiag;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingDiagWrapper;
-import org.diagbot.pub.orm.EntityMapper;
-
-/**
-* @Title: Feature.java
-* @Package: com.zjlantone.nlp.web.doc.dao.model
-* @Description: 数据库操作接口类 
-* @author: 楼辉荣
-* @date: 2016年8月8日 下午17:16:23
-* @version: V1.0
-*/
-public interface ResultMappingDiagMapper extends EntityMapper<ResultMappingDiag, ResultMappingDiagWrapper, Long> {
-}

+ 0 - 22
bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingFilterMapper.java

@@ -1,22 +0,0 @@
-/** 
-* @Company: 杭州朗通信息技术有限公司
-* @Department: 医疗事业部
-* @Description: 互动反馈系统 
-* @Address: 浙江省杭州市余杭区向往街1008号乐富海邦园11幢4楼
-*/
-package org.diagbot.bigdata.dao.mapper;
-
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingFilterWrapper;
-import org.diagbot.pub.orm.EntityMapper;
-
-/**
-* @Title: Feature.java
-* @Package: com.zjlantone.nlp.web.doc.dao.model
-* @Description: 数据库操作接口类 
-* @author: 楼辉荣
-* @date: 2016年8月8日 下午17:16:23
-* @version: V1.0
-*/
-public interface ResultMappingFilterMapper extends EntityMapper<ResultMappingFilter, ResultMappingFilterWrapper, Long> {
-}

+ 0 - 22
bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingVitalMapper.java

@@ -1,22 +0,0 @@
-/** 
-* @Company: 杭州朗通信息技术有限公司
-* @Department: 医疗事业部
-* @Description: 互动反馈系统 
-* @Address: 浙江省杭州市余杭区向往街1008号乐富海邦园11幢4楼
-*/
-package org.diagbot.bigdata.dao.mapper;
-
-import org.diagbot.bigdata.dao.model.ResultMappingVital;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingVitalWrapper;
-import org.diagbot.pub.orm.EntityMapper;
-
-/**
-* @Title: Feature.java
-* @Package: com.zjlantone.nlp.web.doc.dao.model
-* @Description: 数据库操作接口类 
-* @author: 楼辉荣
-* @date: 2016年8月8日 下午17:16:23
-* @version: V1.0
-*/
-public interface ResultMappingVitalMapper extends EntityMapper<ResultMappingVital, ResultMappingVitalWrapper, Long> {
-}

+ 0 - 38
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingDiag.java

@@ -1,38 +0,0 @@
-package org.diagbot.bigdata.dao.model;
-
-import java.io.Serializable;
-
-/**
- * @Auther: fyeman
- * @Date: 2018/9/12/012 16:50
- * @Description:
- */
-public class ResultMappingDiag implements Serializable {
-    private Long id;
-    private String diagName;
-    private String deptName;
-
-    public Long getId() {
-        return id;
-    }
-
-    public void setId(Long id) {
-        this.id = id;
-    }
-
-    public String getDiagName() {
-        return diagName;
-    }
-
-    public void setDiagName(String diagName) {
-        this.diagName = diagName;
-    }
-
-    public String getDeptName() {
-        return deptName;
-    }
-
-    public void setDeptName(String deptName) {
-        this.deptName = deptName;
-    }
-}

+ 0 - 38
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingVital.java

@@ -1,38 +0,0 @@
-package org.diagbot.bigdata.dao.model;
-
-import java.io.Serializable;
-
-/**
- * @Auther: fyeman
- * @Date: 2018/9/10/010 14:30
- * @Description:
- */
-public class ResultMappingVital implements Serializable {
-    private Long id;
-    private String name;
-    private String nameMapping;
-
-    public Long getId() {
-        return id;
-    }
-
-    public void setId(Long id) {
-        this.id = id;
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public void setName(String name) {
-        this.name = name;
-    }
-
-    public String getNameMapping() {
-        return nameMapping;
-    }
-
-    public void setNameMapping(String nameMapping) {
-        this.nameMapping = nameMapping;
-    }
-}

+ 0 - 12
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingDiagWrapper.java

@@ -1,12 +0,0 @@
-package org.diagbot.bigdata.dao.model.wrapper;
-
-
-import org.diagbot.bigdata.dao.model.ResultMappingDiag;
-
-/**
- * @Auther: fyeman
- * @Date: 2018/9/12/012 16:51
- * @Description:
- */
-public class ResultMappingDiagWrapper extends ResultMappingDiag {
-}

+ 0 - 6
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingFilterWrapper.java

@@ -1,6 +0,0 @@
-package org.diagbot.bigdata.dao.model.wrapper;
-
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-
-public class ResultMappingFilterWrapper extends ResultMappingFilter {
-}

+ 0 - 7
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingVitalWrapper.java

@@ -1,7 +0,0 @@
-package org.diagbot.bigdata.dao.model.wrapper;
-
-
-import org.diagbot.bigdata.dao.model.ResultMappingVital;
-
-public class ResultMappingVitalWrapper extends ResultMappingVital {
-}

+ 0 - 55
bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingDiagMapper.xml

@@ -1,55 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
-        "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
-<mapper namespace="org.diagbot.bigdata.dao.mapper.ResultMappingDiagMapper">
-    <!-- 映射定义列-->
-    <resultMap type="org.diagbot.bigdata.dao.model.ResultMappingDiag" id="resultMappingDiagMap">
-        <id property="id" column="id"/>
-        <result property="diagName" column="diag_name"/>
-        <result property="deptName" column="dept_name"/>
-    </resultMap>
-
-    <!-- 映射定义列-->
-    <resultMap type="org.diagbot.bigdata.dao.model.wrapper.ResultMappingDiagWrapper" id="resultMappingDiagWrapperMap">
-        <id property="id" column="id"/>
-        <result property="diagName" column="diag_name"/>
-        <result property="deptName" column="dept_name"/>
-    </resultMap>
-
-    <!-- 通用查询结果列-->
-    <sql id="Base_Column_List">
-		 t.id,	 t.diag_name,	 t.dept_name
-	</sql>
-
-    <!-- 查询(根据主键ID查询) -->
-    <select id="selectByPrimaryKey" resultMap="resultMappingDiagMap" parameterType="java.lang.Integer">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_diag t
-        WHERE t.id = #{id}
-    </select>
-
-    <!-- 查询(根据主键ID查询) -->
-    <select id="selectWrapperByPrimaryKey" resultMap="resultMappingDiagWrapperMap" parameterType="java.lang.Integer">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_diag t
-        WHERE t.id = #{id}
-    </select>
-
-    <!-- 依据Map查询条件返回结果集-->
-    <select id="selectList" resultMap="resultMappingDiagMap" parameterType="java.util.Map">
-        select diag_name, group_concat(dept_name) dept_name from doc_result_mapping_diag  group by diag_name
-    </select>
-
-    <!-- 依据Map查询条件返回扩展属性结果集-->
-    <select id="selectListWrapper" resultMap="resultMappingDiagWrapperMap" parameterType="java.util.Map">
-        select diag_name, group_concat(dept_name) dept_name from doc_result_mapping_diag  group by diag_name
-    </select>
-
-    <!--删除:根据主键ID删除-->
-    <delete id="deleteByPrimaryKey" parameterType="java.lang.Integer">
-		 DELETE FROM doc_result_mapping_diag
-		 WHERE id = #{id}
-	</delete>
-</mapper>

+ 0 - 67
bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingFilterMapper.xml

@@ -1,67 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
-        "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
-<mapper namespace="org.diagbot.bigdata.dao.mapper.ResultMappingFilterMapper">
-    <!-- 映射定义列-->
-    <resultMap type="org.diagbot.bigdata.dao.model.ResultMappingFilter" id="resultMappingFilterMap">
-        <id property="id" column="id"/>
-        <result property="featureName" column="feature_name"/>
-        <result property="featureType" column="feature_type"/>
-        <result property="sex" column="sex"/>
-        <result property="ageStart" column="age_start"/>
-        <result property="ageEnd" column="age_end"/>
-        <result property="remark" column="remark"/>
-    </resultMap>
-
-    <!-- 映射定义列-->
-    <resultMap type="org.diagbot.bigdata.dao.model.wrapper.ResultMappingFilterWrapper" id="resultMappingFilterWrapperMap">
-        <id property="id" column="id"/>
-        <result property="featureName" column="feature_name"/>
-        <result property="featureType" column="feature_type"/>
-        <result property="sex" column="sex"/>
-        <result property="ageStart" column="age_start"/>
-        <result property="ageEnd" column="age_end"/>
-        <result property="remark" column="remark"/>
-    </resultMap>
-
-    <!-- 通用查询结果列-->
-    <sql id="Base_Column_List">
-		 t.id, t.feature_name, t.feature_type, t.sex, t.age_start, t.age_end, t.remark
-	</sql>
-
-    <!-- 查询(根据主键ID查询) -->
-    <select id="selectByPrimaryKey" resultMap="resultMappingFilterMap" parameterType="java.lang.Integer">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_filter t
-        WHERE t.id = #{id}
-    </select>
-
-    <!-- 查询(根据主键ID查询) -->
-    <select id="selectWrapperByPrimaryKey" resultMap="resultMappingFilterWrapperMap" parameterType="java.lang.Integer">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_filter t
-        WHERE t.id = #{id}
-    </select>
-
-    <!-- 依据Map查询条件返回结果集-->
-    <select id="selectList" resultMap="resultMappingFilterMap" parameterType="java.util.Map">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_filter t WHERE 1=1
-    </select>
-
-    <!-- 依据Map查询条件返回扩展属性结果集-->
-    <select id="selectListWrapper" resultMap="resultMappingFilterWrapperMap" parameterType="java.util.Map">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_filter t WHERE 1=1
-    </select>
-
-    <!--删除:根据主键ID删除-->
-    <delete id="deleteByPrimaryKey" parameterType="java.lang.Integer">
-		 DELETE FROM doc_result_mapping_filter
-		 WHERE id = #{id}
-	</delete>
-</mapper>

+ 0 - 77
bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingVitalMapper.xml

@@ -1,77 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
-        "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
-<mapper namespace="org.diagbot.bigdata.dao.mapper.ResultMappingVitalMapper">
-    <!-- 映射定义列-->
-    <resultMap type="org.diagbot.bigdata.dao.model.ResultMappingVital" id="resultMappingVitalMap">
-        <id property="id" column="id"/>
-        <result property="name" column="name"/>
-        <result property="nameMapping" column="name_mapping"/>
-    </resultMap>
-
-    <!-- 映射定义列-->
-    <resultMap type="org.diagbot.bigdata.dao.model.wrapper.ResultMappingVitalWrapper" id="resultMappingVitalWrapperMap">
-        <id property="id" column="id"/>
-        <result property="name" column="name"/>
-        <result property="nameMapping" column="name_mapping"/>
-    </resultMap>
-
-    <!-- 通用查询结果列-->
-    <sql id="Base_Column_List">
-		 t.id,	 t.name,	 t.name_mapping
-	</sql>
-
-    <!-- 查询(根据主键ID查询) -->
-    <select id="selectByPrimaryKey" resultMap="resultMappingVitalMap" parameterType="java.lang.Integer">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_vital t
-        WHERE t.id = #{id}
-    </select>
-
-    <!-- 查询(根据主键ID查询) -->
-    <select id="selectWrapperByPrimaryKey" resultMap="resultMappingVitalWrapperMap" parameterType="java.lang.Integer">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_vital t
-        WHERE t.id = #{id}
-    </select>
-
-    <!-- 依据Map查询条件返回结果集-->
-    <select id="selectList" resultMap="resultMappingVitalMap" parameterType="java.util.Map">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_vital t WHERE 1=1
-        <if test="id != null and id != ''">
-            and t.id = #{id}
-        </if>
-        <if test="name != null and name != ''">
-            and t.name = #{name}
-        </if>
-        <if test="nameMapping != null and nameMapping != ''">
-            and t.name_mapping = #{nameMapping}
-        </if>
-    </select>
-
-    <!-- 依据Map查询条件返回扩展属性结果集-->
-    <select id="selectListWrapper" resultMap="resultMappingVitalWrapperMap" parameterType="java.util.Map">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_vital t WHERE 1=1
-        <if test="id != null and id != ''">
-            and t.id = #{id}
-        </if>
-        <if test="name != null and name != ''">
-            and t.name = #{name}
-        </if>
-        <if test="nameMapping != null and nameMapping != ''">
-            and t.name_mapping = #{nameMapping}
-        </if>
-    </select>
-
-    <!--删除:根据主键ID删除-->
-    <delete id="deleteByPrimaryKey" parameterType="java.lang.Integer">
-		 DELETE FROM doc_result_mapping_vital
-		 WHERE id = #{id}
-	</delete>
-</mapper>

+ 0 - 8
bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingDiagService.java

@@ -1,8 +0,0 @@
-package org.diagbot.bigdata.service;
-
-import org.diagbot.bigdata.dao.model.ResultMappingDiag;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingDiagWrapper;
-import org.diagbot.pub.service.BaseService;
-
-public interface ResultMappingDiagService extends BaseService<ResultMappingDiag, ResultMappingDiagWrapper, Long> {
-}

+ 0 - 8
bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingFilterService.java

@@ -1,8 +0,0 @@
-package org.diagbot.bigdata.service;
-
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingFilterWrapper;
-import org.diagbot.pub.service.BaseService;
-
-public interface ResultMappingFilterService extends BaseService<ResultMappingFilter, ResultMappingFilterWrapper, Long> {
-}

+ 0 - 8
bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingVitalService.java

@@ -1,8 +0,0 @@
-package org.diagbot.bigdata.service;
-
-import org.diagbot.bigdata.dao.model.ResultMappingVital;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingVitalWrapper;
-import org.diagbot.pub.service.BaseService;
-
-public interface ResultMappingVitalService extends BaseService<ResultMappingVital, ResultMappingVitalWrapper, Long> {
-}

+ 0 - 21
bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingDiagServiceImpl.java

@@ -1,21 +0,0 @@
-package org.diagbot.bigdata.service.impl;
-
-import org.diagbot.bigdata.dao.mapper.ResultMappingDiagMapper;
-import org.diagbot.bigdata.dao.model.ResultMappingDiag;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingDiagWrapper;
-import org.diagbot.bigdata.service.ResultMappingDiagService;
-import org.diagbot.pub.service.BaseServiceImpl;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-@Service
-public class ResultMappingDiagServiceImpl extends BaseServiceImpl<ResultMappingDiag, ResultMappingDiagWrapper, Long> implements ResultMappingDiagService {
-    @Autowired
-    ResultMappingDiagMapper resultMappingDiagMapper;
-
-    @Autowired
-    private void setEntityMapper() {
-        super.setEntityMapper(resultMappingDiagMapper);
-    }
-
-}

+ 0 - 21
bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingFilterServiceImpl.java

@@ -1,21 +0,0 @@
-package org.diagbot.bigdata.service.impl;
-
-import org.diagbot.bigdata.dao.mapper.ResultMappingFilterMapper;
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingFilterWrapper;
-import org.diagbot.bigdata.service.ResultMappingFilterService;
-import org.diagbot.pub.service.BaseServiceImpl;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-@Service
-public class ResultMappingFilterServiceImpl extends BaseServiceImpl<ResultMappingFilter, ResultMappingFilterWrapper, Long> implements ResultMappingFilterService {
-    @Autowired
-    ResultMappingFilterMapper resultMappingFilterMapper;
-
-    @Autowired
-    private void setEntityMapper() {
-        super.setEntityMapper(resultMappingFilterMapper);
-    }
-
-}

+ 0 - 21
bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingVitalServiceImpl.java

@@ -1,21 +0,0 @@
-package org.diagbot.bigdata.service.impl;
-
-import org.diagbot.bigdata.dao.mapper.ResultMappingVitalMapper;
-import org.diagbot.bigdata.dao.model.ResultMappingVital;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingVitalWrapper;
-import org.diagbot.bigdata.service.ResultMappingVitalService;
-import org.diagbot.pub.service.BaseServiceImpl;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-@Service
-public class ResultMappingVitalServiceImpl extends BaseServiceImpl<ResultMappingVital, ResultMappingVitalWrapper, Long> implements ResultMappingVitalService {
-    @Autowired
-    ResultMappingVitalMapper resultMappingVitalMapper;
-
-    @Autowired
-    private void setEntityMapper() {
-        super.setEntityMapper(resultMappingVitalMapper);
-    }
-
-}

+ 29 - 13
bigdata-web/src/main/java/org/diagbot/bigdata/work/AlgorithmCore.java

@@ -3,9 +3,10 @@ package org.diagbot.bigdata.work;
 import org.algorithm.core.AlgorithmExecutor;
 import org.algorithm.factory.AlgorithmFactory;
 import org.algorithm.util.AlgorithmClassify;
-import org.diagbot.common.work.FeatureRate;
-import org.diagbot.common.work.ResponseData;
-import org.diagbot.common.work.SearchData;
+import org.diagbot.common.push.bean.FeatureRate;
+import org.diagbot.common.push.bean.ResponseData;
+import org.diagbot.common.push.bean.SearchData;
+import org.diagbot.common.push.naivebayes.factory.AlgorithmNaiveBayesFactory;
 import org.diagbot.nlp.feature.FeatureType;
 import org.diagbot.nlp.util.Constants;
 import org.diagbot.nlp.util.NlpCache;
@@ -25,14 +26,12 @@ import java.util.*;
  **/
 public class AlgorithmCore {
     Logger logger = LoggerFactory.getLogger(AlgorithmCore.class);
+
     public ResponseData algorithm(HttpServletRequest request, SearchData searchData, ResponseData responseData) throws Exception {
-        if (responseData == null) {
-            responseData = new ResponseData();
-        }
         //录入文本处理,包括提取特征、推送类型转换等
-        ParamsDataProxy paramsDataProxy = new ParamsDataProxy();
+        BigDataParamsProxy paramsDataProxy = new BigDataParamsProxy();
         logger.info("页面文本信息:" + searchData.getSymptom());
-        paramsDataProxy.createSearchData(request, searchData);
+        paramsDataProxy.createSearchData(searchData);
         //对象拷贝至BigDataSearchData处理
         BigDataSearchData bigDataSearchData = new BigDataSearchData();
         BeanUtils.copyProperties(searchData, bigDataSearchData);
@@ -49,11 +48,30 @@ public class AlgorithmCore {
             if (classifies[i] == null) {
                 continue;
             }
-            //算法推理
-            AlgorithmExecutor executor = AlgorithmFactory.getInstance(classifies[i]);
             Map<String, Float> featuresMap = null;
+            AlgorithmExecutor executor = null;
+            switch (searchData.getAlgorithmType() == null ? 1 : searchData.getAlgorithmType()) {
+                case 1: //机器学习算法推理
+                    executor = AlgorithmFactory.getInstance(classifies[i]);
+                    if (FeatureType.parse(featureTypes[i]) == FeatureType.DIAG && !"2".equals(searchData.getSysCode())) {
+                        bigDataSearchData.setLength(6);//模型推送最多6个比较合理
+                    }
+                    break;
+                case 2: //朴素贝叶斯算法推理
+                    if (FeatureType.parse(featureTypes[i]) == FeatureType.DIAG) {
+                        executor = AlgorithmNaiveBayesFactory.getInstance();
+                    }
+                    break;
+                default:
+                    executor = AlgorithmFactory.getInstance(classifies[i]);
+            }
+
             if (executor != null) {
-                featuresMap = executor.execute(bigDataSearchData.getInputs());;
+                if (bigDataSearchData.getInputs().get("sentence") != null) {
+                    featuresMap = executor.execute(bigDataSearchData.getInputs());
+                }
+            } else {
+                continue;
             }
             List<Map.Entry<String, Float>> featuresOrderList = null;
             if (featuresMap == null) {
@@ -65,8 +83,6 @@ public class AlgorithmCore {
                 if (Constants.feature_type_symptom.equals(searchData.getFeatureTypes()[i])) {
                     featuresMap = resultDataProxy.mapAdd(featuresMap, NlpCache.getStandard_info_push_map(), true);
                 }
-                //大小类合并
-                featuresMap = resultDataProxy.resultMerge(request, featuresMap);
                 //按模型计算的概率排序
                 featuresOrderList = new ArrayList<Map.Entry<String, Float>>(featuresMap.entrySet());
                 Collections.sort(featuresOrderList, new Comparator<Map.Entry<String, Float>>() {

+ 95 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/work/BigDataParamsProxy.java

@@ -0,0 +1,95 @@
+package org.diagbot.bigdata.work;
+
+import org.algorithm.util.AlgorithmClassify;
+import org.apache.commons.lang3.StringUtils;
+import org.diagbot.common.push.bean.SearchData;
+import org.diagbot.common.push.work.ParamsDataProxy;
+import org.diagbot.nlp.feature.FeatureType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @ClassName org.diagbot.bigdata.work.ParamsDataProxy
+ * @Description TODO
+ * @Author fyeman
+ * @Date 2019/1/16/016 14:04
+ * @Version 1.0
+ **/
+public class BigDataParamsProxy {
+    Logger logger = LoggerFactory.getLogger(BigDataParamsProxy.class);
+
+    public void createSearchData(SearchData searchData) throws Exception {
+        ParamsDataProxy paramsDataProxy = new ParamsDataProxy();
+        paramsDataProxy.createSearchData(searchData);
+    }
+
+    /**
+     * featureType转算法模型类型
+     *
+     * @param sysCode
+     * @param featureTypes
+     * @param searchData
+     */
+    public AlgorithmClassify[] createAlgorithmClassify(String sysCode, String[] featureTypes, SearchData searchData) {
+        AlgorithmClassify[] classifies = new AlgorithmClassify[featureTypes.length];
+        //下了诊断且其他信息全为空 反推标识
+        boolean reverse = searchData.getDiagOrder() != null && searchData.getDiagOrder().size() > 0 && StringUtils.isEmpty(searchData.getSymptom());
+        for (int i = 0; i < featureTypes.length; i++) {
+            if (featureTypes[i] != null) {
+                //模型
+                switch (FeatureType.parse(featureTypes[i])) {
+                    case SYMPTOM:
+                        if (reverse) {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_SYMPTOM;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_SYMPTOM;
+                        }
+                        break;
+                    case DIAG:
+                        if (reverse) {
+                            classifies[i] = null;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG;
+                        }
+                        break;
+                    case VITAL:
+                        if (reverse) {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_VITAL;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_VITAL;
+                        }
+                        break;
+                    case LIS:
+                        if (reverse) {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_LIS;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_LIS;
+                        }
+                        break;
+                    case PACS:
+                        if (reverse) {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_PACS;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_PACS;
+                        }
+                        break;
+                    case TREAT:
+                        if (reverse) {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_TREAT;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_TREAT;
+                        }
+                        break;
+                    case HISTORY:
+                        if (reverse) {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_HISTORY;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_HISTORY;
+                        }
+                        break;
+                }
+            }
+        }
+        return classifies;
+    }
+}

+ 1 - 1
bigdata-web/src/main/java/org/diagbot/bigdata/work/BigDataSearchData.java

@@ -1,7 +1,7 @@
 package org.diagbot.bigdata.work;
 
 import org.algorithm.util.AlgorithmClassify;
-import org.diagbot.common.work.SearchData;
+import org.diagbot.common.push.bean.SearchData;
 
 public class BigDataSearchData extends SearchData {
     //模型

+ 0 - 702
bigdata-web/src/main/java/org/diagbot/bigdata/work/ParamsDataProxy.java

@@ -1,702 +0,0 @@
-package org.diagbot.bigdata.work;
-
-import org.algorithm.util.AlgorithmClassify;
-import org.apache.commons.lang3.StringUtils;
-import org.diagbot.bigdata.common.ApplicationCacheUtil;
-import org.diagbot.bigdata.util.BigDataConstants;
-import org.diagbot.common.work.SearchData;
-import org.diagbot.nlp.feature.FeatureAnalyze;
-import org.diagbot.nlp.feature.FeatureType;
-import org.diagbot.nlp.participle.ParticipleUtil;
-import org.diagbot.nlp.participle.word.Lexeme;
-import org.diagbot.nlp.participle.word.LexemePath;
-import org.diagbot.nlp.util.Constants;
-import org.diagbot.nlp.util.NegativeEnum;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.servlet.http.HttpServletRequest;
-import java.util.*;
-import java.util.regex.Pattern;
-
-/**
- * @ClassName org.diagbot.bigdata.work.ParamsDataProxy
- * @Description TODO
- * @Author fyeman
- * @Date 2019/1/16/016 14:04
- * @Version 1.0
- **/
-public class ParamsDataProxy {
-    Logger logger = LoggerFactory.getLogger(ParamsDataProxy.class);
-    //标准词只处理的词性
-    public static NegativeEnum[] negativeEnums = new NegativeEnum[] { NegativeEnum.VITAL_INDEX, NegativeEnum.SYMPTOM
-            , NegativeEnum.DIGITS, NegativeEnum.EVENT_TIME, NegativeEnum.UNIT, NegativeEnum.DIAG_STAND
-            , NegativeEnum.OTHER};
-    //标准词处理的三元组
-    public static NegativeEnum[][] negativeEnumTriple = {
-            { NegativeEnum.VITAL_INDEX, NegativeEnum.DIGITS, NegativeEnum.UNIT },
-            { NegativeEnum.VITAL_INDEX, NegativeEnum.DIGITS, NegativeEnum.EVENT_TIME },
-            { NegativeEnum.SYMPTOM, NegativeEnum.DIGITS, NegativeEnum.UNIT },
-            { NegativeEnum.SYMPTOM, NegativeEnum.DIGITS, NegativeEnum.EVENT_TIME },
-            { NegativeEnum.DIAG_STAND, NegativeEnum.DIGITS, NegativeEnum.UNIT },
-            { NegativeEnum.DIAG_STAND, NegativeEnum.DIGITS, NegativeEnum.EVENT_TIME },
-            { NegativeEnum.DIAG_STAND, NegativeEnum.DIGITS, NegativeEnum.OTHER }
-    };
-    //标准词处理的二元组
-    public static NegativeEnum[][] negativeEnumTwoTuple = {
-            { NegativeEnum.VITAL_INDEX, NegativeEnum.DIGITS },
-            { NegativeEnum.SYMPTOM, NegativeEnum.DIGITS }
-    };
-
-    public void createSearchData(HttpServletRequest request, SearchData searchData) throws Exception {
-        //消除空格
-        if (searchData.getSymptom() != null) {
-            searchData.setSymptom(searchData.getSymptom().trim());
-        }
-        if (searchData.getDiag() != null) {
-            searchData.setDiag(searchData.getDiag().trim());
-        }
-        //计算年龄区间
-        if (searchData.getAge() > 0) {
-            searchData.setAge_start(searchData.getAge() - 5);
-            searchData.setAge_end(searchData.getAge() + 5);
-        }
-        //修改性别代码
-        if (!StringUtils.isEmpty(searchData.getSex())) {
-            if ("M".equals(searchData.getSex())) {
-                searchData.setSex("1");
-            } else if ("F".equals(searchData.getSex())) {
-                searchData.setSex("2");
-            } else {
-                searchData.setSex("3");
-            }
-        } else {
-            searchData.setSex("3");
-        }
-        //默认查询门诊数据
-        if (StringUtils.isEmpty(searchData.getResourceType())) {
-            searchData.setResourceType(BigDataConstants.resource_type_o);
-        }
-        //给症状末尾添加诊断依据标准词
-        String[] items = { searchData.getSymptom(), searchData.getOther(), searchData.getVital()
-                , searchData.getLis(), searchData.getPacs(), searchData.getDiag() };
-        String[] itemsType = { "symptom", "other", "vital", "lis", "pacs", "diag" };
-        for (int i = 0; i < items.length; i++) {
-            if (items[i] != null) {
-                LexemePath<Lexeme> featureData = ParticipleUtil.participle(items[i]);
-                if (featureData != null) {
-                    addStandWord(featureData, ApplicationCacheUtil.getKl_result_mapping_standword_map(), searchData, itemsType[i]);
-                }
-            }
-        }
-        //所有信息参与推送
-        //        searchData.setSymptom(searchData.getSymptom() + searchData.getVital()
-        //                + searchData.getLis() + searchData.getPacs() + searchData.getPast() + searchData.getOther() + searchData.getIndications());
-        if (StringUtils.isNotEmpty(searchData.getSymptom())) {
-            searchData.setSymptom(searchData.getSymptom().trim());
-        }
-        //一次推送多个类别信息
-        String[] featureTypes = searchData.getFeatureType().split(",");
-        //featureType统一转换
-        String[] convertFeatureTypes = new String[featureTypes.length];
-        for (int i = 0; i < featureTypes.length; i++) {
-            convertFeatureTypes[i] = convertFeatureType(searchData.getSysCode(), featureTypes[i]);
-        }
-        searchData.setFeatureType(StringUtils.join(convertFeatureTypes, ","));
-        searchData.setFeatureTypes(convertFeatureTypes);
-
-        //获取入参中的特征信息
-        FeatureAnalyze fa = new FeatureAnalyze();
-        List<Map<String, Object>> featuresList = new ArrayList<>();
-        if (!StringUtils.isEmpty(searchData.getSymptom())) {
-            //提取现病史
-            featuresList = fa.start(searchData.getSymptom(), FeatureType.FEATURE);
-            paramFeatureInit(searchData, featuresList);
-            //提取时间信息
-            featuresList = fa.start(searchData.getSymptom(), FeatureType.TIME);
-            paramFeatureInit(searchData, featuresList);
-        }
-        if (!StringUtils.isEmpty(searchData.getVital())) {
-            //提取体征
-            featuresList = fa.start(searchData.getVital(), FeatureType.FEATURE);
-            paramFeatureInit(searchData, featuresList);
-        }
-        if (!StringUtils.isEmpty(searchData.getPast())) {
-            //提取既往史
-            featuresList = fa.start(searchData.getPast(), FeatureType.FEATURE);
-            paramFeatureInit(searchData, featuresList);
-        }
-        if (!StringUtils.isEmpty(searchData.getOther()) || !StringUtils.isEmpty(searchData.getIndications())) {
-            //提取其他史等
-            featuresList = fa.start((searchData.getOther() == null ? "" : searchData.getOther()) + (searchData.getIndications() == null ? "" : searchData.getIndications()), FeatureType.FEATURE);
-            paramFeatureInit(searchData, featuresList);
-        }
-        if (!StringUtils.isEmpty(searchData.getPacs())) {
-            featuresList = fa.start(searchData.getPacs(), FeatureType.PACS);
-            paramFeatureInit(searchData, featuresList);
-        }
-        if (!StringUtils.isEmpty(searchData.getLis())) {
-            featuresList = fa.start(searchData.getLis(), FeatureType.LIS);
-            paramFeatureInit(searchData, featuresList);
-        }
-        // 清洗特征词,去除词性不匹配的词
-        searchData = cleanFeature(featuresList, fa, searchData);
-        if (!StringUtils.isEmpty(searchData.getOther())) {
-            //如果既往史中诊断信息,需要提取这个特征
-            featuresList = fa.start(searchData.getOther(), FeatureType.DIAG);
-            paramFeatureInit(searchData, featuresList);
-        }
-
-        if (!StringUtils.isEmpty(searchData.getDiag()) && StringUtils.isEmpty(searchData.getSymptom())) {
-            featuresList = fa.start(searchData.getDiag(), FeatureType.DIAG);
-            paramFeatureInit(searchData, featuresList);
-        }
-    }
-
-    /**
-     * featureType转算法模型类型
-     *
-     * @param sysCode
-     * @param featureTypes
-     * @param searchData
-     */
-    public AlgorithmClassify[] createAlgorithmClassify(String sysCode, String[] featureTypes, SearchData searchData) {
-        AlgorithmClassify[] classifies = new AlgorithmClassify[featureTypes.length];
-        //下了诊断且其他信息全为空 反推标识
-        boolean reverse = !StringUtils.isEmpty(searchData.getDiag()) && StringUtils.isEmpty(searchData.getSymptom());
-        for (int i = 0; i < featureTypes.length; i++) {
-            //            featureTypes[i] = convertFeatureType(sysCode, featureTypes[i]);
-            if (featureTypes[i] != null) {
-                //模型
-                switch (FeatureType.parse(featureTypes[i])) {
-                    case SYMPTOM:
-                        if (reverse) {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_SYMPTOM;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_SYMPTOM;
-                        }
-                        break;
-                    case DIAG:
-                        if (reverse) {
-                            classifies[i] = null;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG;
-                        }
-                        break;
-                    case VITAL:
-                        if (reverse) {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_VITAL;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_VITAL;
-                        }
-                        break;
-                    case LIS:
-                        if (reverse) {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_LIS;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_LIS;
-                        }
-                        break;
-                    case PACS:
-                        if (reverse) {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_PACS;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_PACS;
-                        }
-                        break;
-                    case TREAT:
-                        if (reverse) {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_TREAT;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_TREAT;
-                        }
-                        break;
-                    case HISTORY:
-                        if (reverse) {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_HISTORY;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_HISTORY;
-                        }
-                        break;
-                }
-            }
-        }
-        return classifies;
-    }
-
-    /**
-     * 外部系统featureType需要转化为大数据定义的featureType
-     *
-     * @param sysCode
-     * @param featureType
-     * @return
-     */
-    private String convertFeatureType(String sysCode, String featureType) {
-        if (StringUtils.isEmpty(sysCode) || sysCode.equals("1")) {
-            if ("1".equals(featureType)) {
-                return BigDataConstants.feature_type_symptom;
-            }
-            if ("7".equals(featureType)) {
-                return BigDataConstants.feature_type_diag;
-            }
-            if ("4".equals(featureType)) {
-                return BigDataConstants.feature_type_vital;
-            }
-            if ("5".equals(featureType)) {
-                return BigDataConstants.feature_type_lis;
-            }
-            if ("6".equals(featureType)) {
-                return BigDataConstants.feature_type_pacs;
-            }
-            if ("3".equals(featureType)) {
-                return BigDataConstants.feature_type_history;
-            }
-            if ("8".equals(featureType)) {
-                return BigDataConstants.feature_type_treat;
-            }
-            if ("22".equals(featureType)) {
-                return BigDataConstants.feature_type_labelpush;
-            }
-            if ("11".equals(featureType)) {
-                return BigDataConstants.feature_type_manju;
-            }
-            if ("42".equals(featureType)) {
-                return BigDataConstants.feature_type_vital_index;
-            }
-            return null;
-        }
-        return featureType;
-    }
-
-    /**
-     * 推送模型入参
-     *
-     * @param searchData
-     * @throws Exception
-     */
-    private void paramFeatureInit(SearchData searchData, List<Map<String, Object>> featuresList) throws Exception {
-        if (featuresList != null && featuresList.size() > 0) {
-            Map<String, Object> featureMap = null;
-            for (int i = 0; i < featuresList.size(); i++) {
-                featureMap = featuresList.get(i);
-                Map<String, String> map = new HashMap<>();
-                for (Map.Entry<String, Object> entry : featureMap.entrySet()) {
-                    map.put(entry.getKey(), String.valueOf(entry.getValue()));
-                }
-                map.put("featureType", String.valueOf(featureMap.get("feature_type")));
-                map.put("featureName", String.valueOf(featureMap.get("feature_name")));
-                map.put("property", String.valueOf(featureMap.get("property")));
-                map.put("concept", String.valueOf(featureMap.get("concept")));
-                if (Constants.default_negative.equals(featureMap.get("negative"))) {
-                    if (map.get("featureType").equals(Constants.feature_type_time)) {
-                        searchData.getInputs().put("时间", map);
-                    } else {
-                        if (searchData.getInputs().get(map.get("feature_name")) == null) {
-                            if (i < 5) {
-                                searchData.getInputs().put(map.get("feature_name"), map);
-                            }
-                            searchData.getGraphInputs().put(map.get("feature_name"), map);
-                        }
-                    }
-                } else {
-                    searchData.getFilters().put(map.get("feature_name"), map);
-                }
-            }
-        }
-    }
-
-    /**
-     * 给SearchData中症状末尾添加诊断依据标准词
-     *
-     * @param lexemes
-     * @param standWords
-     * @param sData
-     * @return
-     */
-    public SearchData addStandWord(List<Lexeme> lexemes, Map<String, List<Map<String, String>>> standWords, SearchData sData, String itemType) {
-        List<Lexeme> feature = new ArrayList<>();
-
-        //收集分词结果中体征指标或体征指标值(数字)
-        for (Lexeme lexeme : lexemes) {
-            if (lexeme.getProperty().contains(",")) {
-                ApplicationCacheUtil.setProterty(lexeme); //如果分词后词性有多个,只选一个(暂时只处理症状,体征)
-            }
-            NegativeEnum lexemeNegativeEnum = NegativeEnum.parseOfValue(lexeme.getProperty());
-            for (int i = 0; i < negativeEnums.length; i++) {
-                if (lexemeNegativeEnum == negativeEnums[i]) {
-                    feature.add(lexeme);
-                    break;
-                }
-            }
-        }
-        //根据收集到的分词结果把体征指标和对应体征指标值(数字)拼接
-        List<String> featureType = new ArrayList<>();
-
-        for (int i = 0; i < feature.size(); i++) {
-            boolean featureTypeState = true;
-            boolean featureTypeStatus = false;
-            if (i < feature.size() - 2) {
-                for (int j = 0; j < negativeEnumTriple.length; j++) {
-                    String featureText = "";
-                    for (int k = 0; k < negativeEnumTriple[j].length; k++) {
-                        if (NegativeEnum.parseOfValue(feature.get(i + k).getProperty()) == negativeEnumTriple[j][k]) {
-                            featureTypeStatus = true;
-                            featureText += "\t" + feature.get(i + k).getText();
-                        } else {
-                            featureTypeStatus = false;
-                            break;
-                        }
-                    }
-                    if (featureTypeStatus) {
-                        featureType.add(featureText);
-                        featureTypeState = false;
-                    }
-                }
-            }
-            if (featureTypeState && i < feature.size() - 1) {
-                for (int j = 0; j < negativeEnumTwoTuple.length; j++) {
-                    String featureText = "";
-                    for (int k = 0; k < negativeEnumTwoTuple[j].length; k++) {
-                        if (NegativeEnum.parseOfValue(feature.get(i + k).getProperty()) == negativeEnumTwoTuple[j][k]) {
-                            featureTypeStatus = true;
-                            featureText += "\t" + feature.get(i + k).getText();
-                        } else {
-                            featureTypeStatus = false;
-                            break;
-                        }
-                    }
-                    if (featureTypeStatus) {
-                        featureType.add(featureText);
-                    }
-                }
-            }
-        }
-        //将标准词中体征指标值(数字)与分词结果中体征指标值(数字)比较
-        String newStandWord = "";
-        for (String f : featureType) {
-            String[] features = f.trim().split("\t");
-            if (standWords.containsKey(features[0])) {
-                List<Map<String, String>> standWordList = standWords.get(features[0]);
-                for (Map<String, String> standWordMap : standWordList) {
-                    if (standWordMap.containsKey("unit") && standWordMap.containsKey("value")) {
-                        if (features.length == 2) {
-                            newStandWord = judgment(features, standWordMap, newStandWord, sData, itemType);
-                        } else {
-                            if (standWordMap.get("unit").equals(features[2].toLowerCase())) {
-                                newStandWord = judgment(features, standWordMap, newStandWord, sData, itemType);
-                            }
-                        }
-                    } else if (standWordMap.containsKey("value")) {
-                        if (features.length == 2) {
-                            newStandWord = judgment(features, standWordMap, newStandWord, sData, itemType);
-                        }
-                    }
-                }
-            }
-        }
-        //血压既满足血压≥140/90mmHg,又满足血压小于90/60mmHg时,只取前者
-        String addStandWords = "";
-        String smallerStandWord = "";
-        boolean flag = true;
-        for (String standWord : newStandWord.split(",")) {
-            if (!"".equals(standWord) || standWord.length() > 0) {
-                if (standWord.contains("<") || standWord.contains("<=") || standWord.contains("小于")) {
-                    smallerStandWord += "," + standWord;
-                } else {
-                    addStandWords += "," + proxy(standWord);
-                    flag = false;
-                }
-            }
-        }
-        if (flag) {
-            addStandWords += smallerStandWord;
-        }
-        addbloodPressure(sData, itemType, addStandWords);
-        return sData;
-    }
-
-
-    /**
-     * 将标准词中体征指标值(数字)与分词结果中体征指标值(数字)比较
-     * 除了血压>140/90mmHg类似标准词,其他标准词直接添加在症状后面
-     *
-     * @param features
-     * @param standWordMap
-     * @param standWord
-     * @param sData
-     * @return 血压>140/90mmHg或血压小于90/60mmHg或同时返回,在addStandWord()中进一步处理
-     */
-    private String judgment(String[] features, Map<String, String> standWordMap, String standWord, SearchData sData, String itemType) {
-        if (hasDigit(features[1])) {
-            try {
-                if (">".equals(standWordMap.get("op"))) {
-                    //单独处理  血压>140/90mmHg   类似情况
-                    if (features[1].contains("/")) {
-                        if (standWordMap.get("value").contains("/")) {
-                            String[] feature = features[1].split("/");
-                            Integer featuresSBP = Integer.valueOf(feature[0]); //分词特征收缩压
-                            Integer featuresDBP = Integer.valueOf(feature[1]); //分词特征舒张压
-
-                            String[] values = standWordMap.get("value").split("/");
-                            Integer standWordSBP = Integer.valueOf(values[0]); //标准词收缩压
-                            Integer standWordDBP = Integer.valueOf(values[1]); //标准词舒张压
-                            if (featuresSBP > standWordSBP || featuresDBP > standWordDBP) {
-                                standWord += "," + standWordMap.get("standword");
-                            }
-                        }
-                    } else {
-                        //"symptom","other","vital","lis","pacs","diag"
-                        String num = getNum(standWordMap.get("value"));
-                        if (Double.valueOf(getNum(features[1])) > Double.valueOf(num)) {
-                            setStandword(standWordMap, sData, itemType);
-                        }
-                    }
-                } else if ("<".equals(standWordMap.get("op"))) {
-                    //单独处理  血压小于90/60mmHg   类似情况
-                    if (standWordMap.get("value").contains("/")) {
-                        if (features[1].contains("/")) {
-                            String[] feature = features[1].split("/");
-                            Integer featuresSBP = Integer.valueOf(feature[0]); //分词特征收缩压
-                            Integer featuresDBP = Integer.valueOf(feature[1]); //分词特征舒张压
-
-                            String[] values = standWordMap.get("value").split("/");
-                            Integer standWordSBP = Integer.valueOf(values[0]); //标准词收缩压
-                            Integer standWordDBP = Integer.valueOf(values[1]); //标准词舒张压
-                            if (featuresSBP < standWordSBP || featuresDBP < standWordDBP) {
-                                standWord += "," + standWordMap.get("standword");
-                            }
-                        }
-                    } else {
-                        String num = getNum(standWordMap.get("value"));
-                        if (Double.valueOf(getNum(features[1])) < Double.valueOf(num)) {
-                            setStandword(standWordMap, sData, itemType);
-                        }
-                    }
-                } else if (">=".equals(standWordMap.get("op"))) {
-                    //单独处理  血压大于等于140/90mmHg   类似情况
-                    if (standWordMap.get("value").contains("/")) {
-                        if (features[1].contains("/")) {
-                            String[] feature = features[1].split("/");
-                            Integer featuresSBP = Integer.valueOf(feature[0]); //分词特征收缩压
-                            Integer featuresDBP = Integer.valueOf(feature[1]); //分词特征舒张压
-
-                            String[] values = standWordMap.get("value").split("/");
-                            Integer standWordSBP = Integer.valueOf(values[0]); //标准词收缩压
-                            Integer standWordDBP = Integer.valueOf(values[1]); //标准词舒张压
-                            if (featuresSBP >= standWordSBP || featuresDBP >= standWordDBP) {
-                                standWord += "," + standWordMap.get("standword");
-                            }
-                        }
-                    } else {
-                        String num = getNum(standWordMap.get("value"));
-                        if (Double.valueOf(getNum(features[1])) >= Double.valueOf(num)) {
-                            setStandword(standWordMap, sData, itemType);
-                        }
-                    }
-                } else if ("<=".equals(standWordMap.get("op"))) {
-                    //单独处理  血压小于等于90/60mmHg   类似情况
-                    if (standWordMap.get("value").contains("/")) {
-                        if (features[1].contains("/")) {
-                            String[] feature = features[1].split("/");
-                            Integer featuresSBP = Integer.valueOf(feature[0]); //分词特征收缩压
-                            Integer featuresDBP = Integer.valueOf(feature[1]); //分词特征舒张压
-
-                            String[] values = standWordMap.get("value").split("/");
-                            Integer standWordSBP = Integer.valueOf(values[0]); //标准词收缩压
-                            Integer standWordDBP = Integer.valueOf(values[1]); //标准词舒张压
-                            if (featuresSBP <= standWordSBP || featuresDBP <= standWordDBP) {
-                                standWord += "," + standWordMap.get("standword");
-                            }
-                        }
-                    } else {
-                        String num = getNum(standWordMap.get("value"));
-                        if (Double.valueOf(getNum(features[1])) <= Double.valueOf(num)) {
-                            setStandword(standWordMap, sData, itemType);
-                        }
-                    }
-                }
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-        return standWord;
-    }
-
-    /**
-     * 根据不同项目添加标准词
-     *
-     * @param standWordMap
-     * @param sData
-     * @param itemType
-     */
-    private void setStandword(Map<String, String> standWordMap, SearchData sData, String itemType) {
-        switch (itemType) {
-            case "symptom":
-                if (sData.getSymptom().indexOf(proxy(standWordMap.get("standword"))) == -1) {
-                    sData.setSymptom(sData.getSymptom() + "," + proxy(standWordMap.get("standword")));
-                }
-                break;
-            case "other":
-                if (sData.getOther().indexOf(proxy(standWordMap.get("standword"))) == -1) {
-                    sData.setOther(sData.getOther() + "," + proxy(standWordMap.get("standword")));
-                }
-                break;
-            case "vital":
-                if (sData.getVital().indexOf(proxy(standWordMap.get("standword"))) == -1) {
-                    sData.setVital(sData.getVital() + "," + proxy(standWordMap.get("standword")));
-                }
-                break;
-            case "lis":
-                if (sData.getLis().indexOf(proxy(standWordMap.get("standword"))) == -1) {
-                    sData.setLis(sData.getLis() + "," + proxy(standWordMap.get("standword")));
-                }
-                break;
-            case "pacs":
-                if (sData.getPacs().indexOf(proxy(standWordMap.get("standword"))) == -1) {
-                    sData.setPacs(sData.getPacs() + "," + proxy(standWordMap.get("standword")));
-                }
-                break;
-            case "diag":
-                if (sData.getDiag().indexOf(proxy(standWordMap.get("standword"))) == -1) {
-                    sData.setDiag(sData.getDiag() + "," + proxy(standWordMap.get("standword")));
-                }
-                break;
-        }
-    }
-
-    /**
-     * 添加血压(血压既满足血压≥140/90mmHg,又满足血压小于90/60mmHg时,只取前者)
-     *
-     * @param sData
-     * @param itemType
-     * @param addStandWords
-     */
-    private void addbloodPressure(SearchData sData, String itemType, String addStandWords) {
-        switch (itemType) {
-            case "symptom":
-                if (sData.getSymptom().indexOf(addStandWords) == -1) {
-                    sData.setSymptom(sData.getSymptom() + "," + addStandWords);
-                }
-                break;
-            case "other":
-                if (sData.getOther().indexOf(addStandWords) == -1) {
-                    sData.setOther(sData.getOther() + "," + addStandWords);
-                }
-                break;
-            case "vital":
-                if (sData.getVital().indexOf(addStandWords) == -1) {
-                    sData.setVital(sData.getVital() + "," + addStandWords);
-                }
-                break;
-            case "lis":
-                if (sData.getLis().indexOf(addStandWords) == -1) {
-                    sData.setLis(sData.getLis() + "," + addStandWords);
-                }
-                break;
-            case "pacs":
-                if (sData.getPacs().indexOf(addStandWords) == -1) {
-                    sData.setPacs(sData.getPacs() + "," + addStandWords);
-                }
-                break;
-            case "diag":
-                if (sData.getDiag().indexOf(addStandWords) == -1) {
-                    sData.setDiag(sData.getDiag() + "," + addStandWords);
-                }
-                break;
-        }
-    }
-
-    /**
-     * 判断分词后的特征中是否含有数字
-     *
-     * @param content
-     * @return
-     */
-    private boolean hasDigit(String content) {
-        boolean flag = false;
-        if (Pattern.compile(".*\\d+.*").matcher(content).matches()) {
-            flag = true;
-        }
-        return flag;
-    }
-
-    /**
-     * 将字符串中的数字提取出来,针对分词结果中"90."类似情况
-     *
-     * @param standWord
-     * @return
-     */
-    private String getNum(String standWord) {
-        StringBuffer sb = new StringBuffer();
-        for (String num : standWord.replaceAll("[^0-9]", ",").split(",")) {
-            if (num.length() > 0) {
-                sb.append(num);
-            }
-        }
-        return sb.toString();
-    }
-
-    /**
-     * 将血压超过标准值的标准词改为血压升高
-     *
-     * @param standWord
-     * @return
-     */
-    private String proxy(String standWord) {
-        if (standWord.contains("压") && (standWord.contains("≥") || standWord.contains("大于"))) {
-            standWord = "血压升高";
-        } else if (standWord.contains("心率") && (standWord.contains("大于") || standWord.contains("超过"))) {
-            standWord = "心率快";
-        }
-        return standWord;
-    }
-
-    private SearchData cleanFeature(List<Map<String, Object>> featuresList, FeatureAnalyze fa,
-                                    SearchData searchData) {
-        // 在输入的辅检文本中,只提取辅检信息
-        String[] PACS_Feature = { Constants.word_property_PACS,
-                Constants.word_property_PACS_Detail, Constants.word_property_PACS_Result };
-        searchData = removeFeature(searchData.getLis(), fa, searchData, PACS_Feature, FeatureType.PACS);
-
-        // 在输入的化验文本中,只提取化验信息
-        String[] LIS_Feature = { Constants.word_property_LIS,
-                Constants.word_property_LIS_Detail, Constants.word_property_LIS_Result };
-        searchData = removeFeature(searchData.getPacs(), fa, searchData, LIS_Feature, FeatureType.LIS);
-
-        return searchData;
-    }
-
-    private SearchData removeFeature(String text, FeatureAnalyze fa,
-                                     SearchData searchData, String[] properties, FeatureType featureType) {
-        String name = "";
-        Boolean related = false;
-
-        try {
-            List<Map<String, Object>> featureList = fa.start(text, featureType);
-            if (featureList != null) {
-                for (Map<String, Object> item : featureList) {
-                    name = item.get("feature_name").toString();
-                    String[] property = item.get("property").toString().split(",");
-                    for (String prop : property) {
-                        if (Arrays.asList(properties).contains(prop)) {
-                            //                            related = true;
-                            searchData.getInputs().remove(name);
-                            break;
-                        }
-                    }
-
-                    //                    if (!related) {
-                    //                        searchData.getInputs().remove(name);
-                    //                    }
-                    //9
-                    //                    related = false;
-                }
-            }
-
-        } catch (Exception ex) {
-            ex.printStackTrace();
-        } finally {
-            return searchData;
-        }
-    }
-}

+ 56 - 84
bigdata-web/src/main/java/org/diagbot/bigdata/work/ResultDataProxy.java

@@ -1,10 +1,12 @@
 package org.diagbot.bigdata.work;
 
+import com.alibaba.fastjson.JSON;
 import org.apache.commons.lang3.StringUtils;
-import org.diagbot.bigdata.common.ApplicationCacheUtil;
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-import org.diagbot.bigdata.util.BigDataConstants;
-import org.diagbot.common.work.FeatureRate;
+import org.diagbot.common.push.bean.FeatureRate;
+import org.diagbot.common.push.bean.ResponseData;
+import org.diagbot.common.push.bean.ResultMappingFilter;
+import org.diagbot.common.push.bean.SearchData;
+import org.diagbot.common.push.cache.ApplicationCacheUtil;
 import org.diagbot.nlp.feature.FeatureType;
 import org.diagbot.nlp.util.Constants;
 import org.diagbot.nlp.util.NegativeEnum;
@@ -29,7 +31,6 @@ public class ResultDataProxy {
         DecimalFormat df = new DecimalFormat("0.####");
         List<FeatureRate> featureList = new ArrayList<>(10);
 
-//        Map<String, String> resultMappingVitalMap = ApplicationCacheUtil.getDoc_result_mapping_vital_map();
         Map<String, String> resultMappingDiagMap = ApplicationCacheUtil.getDoc_result_mapping_diag_map();
         Map<String, Map<String, ResultMappingFilter>> resultMappingFilterMap = ApplicationCacheUtil.getDoc_result_mapping_filter_map();
         Map<String, Map<String, String>> synonymMap = ApplicationCacheUtil.getStandard_info_synonym_map();
@@ -67,19 +68,11 @@ public class ResultDataProxy {
                 }
             }
             featureList.add(featureRate);
-//            if (!featureType.equals(Constants.feature_type_diag)) {
-                if (cursor < searchData.getLength()) {
-                    cursor++;
-                } else {
-                    break;
-                }
-//            } else {            //诊断最多返回5个
-//                if (cursor < 5) {
+//                if (cursor < searchData.getLength()) {
 //                    cursor++;
 //                } else {
 //                    break;
 //                }
-//            }
         }
 
         return featureList;
@@ -151,86 +144,65 @@ public class ResultDataProxy {
     }
 
     /**
-     * 大小类数据合并
+     * 化验辅检体征,年龄性别过滤
      *
-     * @param request
-     * @param map
+     * @param responseData
+     * @param searchData
+     * @return
      */
-    public Map<String, Float> resultMerge(HttpServletRequest request, Map<String, Float> map) {
-        Map<String, NlpCache.Node> nodesMap = NlpCache.getStandard_info_type_tree_map();
-        Map<String, Float> resultMap = new HashMap<>();
-        //设定阀值
-        float threshold = 0.001f;
-        Map<String, Float> thresholdMap = new HashMap<>();
-        for (Map.Entry<String, Float> entry : map.entrySet()) {
-            if (!"null".equals(entry.getKey()) && entry.getValue() >= threshold) {
-                thresholdMap.put(entry.getKey(), entry.getValue());
-            }
+    public ResponseData resultSexAgeFilter(HttpServletRequest request, ResponseData responseData, SearchData searchData) {
+        if (responseData.getLabs() != null && responseData.getLabs().size() > 0) {//化验
+            responseData.setLabs(sexFilter(request, responseData.getLabs(), searchData, Constants.feature_type_lis));
         }
-
-        NlpCache.Node node = null;
-        List<String> delList = new ArrayList<>();
-        for (Map.Entry<String, Float> entry : thresholdMap.entrySet()) {
-            if (delList.contains(entry.getKey())) continue;
-
-            node = nodesMap.get(entry.getKey());
-            if (node != null) {
-                String topName = node.getName();
-                NlpCache.Node p = node.getParent();
-                if (p != null && nodesMap.get(p.getName()) != null) {
-                    topName = p.getName();
-                }
-                while (p != null) {
-                    List<String> nodeNamesList = new ArrayList<>();
-                    lookChilds(topName, p, thresholdMap, nodeNamesList);
-                    if (nodeNamesList.size() > 0) {
-                        topName = p.getName();
-                    }
-                    p = p.getParent();
-                }
-
-                if (thresholdMap.get(topName) != null) {
-                    resultMap.put(topName, thresholdMap.get(topName));
-                    delList.add(topName);
-                }
-                NlpCache.Node topNode = nodesMap.get(topName);
-                lookChildsAndCal(resultMap, thresholdMap, topNode, delList, topNode.getName());
-                delList.add(topName);
-            } else {
-                resultMap.put(entry.getKey(), entry.getValue());
-            }
+        if (responseData.getPacs() != null && responseData.getPacs().size() > 0) {//辅检
+            responseData.setPacs(sexFilter(request, responseData.getPacs(), searchData, Constants.feature_type_pacs));
         }
-        return resultMap;
+        if (responseData.getVitals() != null && responseData.getVitals().size() > 0) {//查体
+            responseData.setVitals(sexFilter(request, responseData.getVitals(), searchData, Constants.feature_type_vital));
+        }
+        return responseData;
     }
 
-    private void lookChilds(String own, NlpCache.Node p, Map<String, Float> thresholdMap, List<String> nodeNamesList) {
-        for (NlpCache.Node n : p.getChilds()) {
-            if (own.equals(n.getName())) {
-                continue;
-            } else {
-                if (thresholdMap.get(n.getName()) != null) {
-                    nodeNamesList.add(n.getName());
+    public List<FeatureRate> sexFilter(HttpServletRequest request, List<FeatureRate> featureList, SearchData searchData, String featureType) {
+        List<FeatureRate> featureRates = new ArrayList<>(10);//用来存放传入的数据结果
+        List<FeatureRate> featureRateList = new ArrayList<>(10);//用来存放返回结果
+        Map<String, Map<String, ResultMappingFilter>> resultMappingFilterMap = ApplicationCacheUtil.getDoc_result_mapping_lpvSex_filter_map();
+        boolean isFirst = false;
+        for (int i = 0; i < featureList.size(); i++) {
+            FeatureRate entity = JSON.parseObject(JSON.toJSONString(featureList.get(i)), FeatureRate.class);
+            featureRates.add(entity);
+        }
+        for (FeatureRate featureRate : featureRates) {
+            isFirst = false;
+            //性别年龄过滤
+            Map<String, ResultMappingFilter> filterMap = resultMappingFilterMap.get(featureType);
+            if (filterMap != null) {
+                ResultMappingFilter filter = filterMap.get(featureRate.getFeatureName());
+                if (filter != null) {
+                    if (filter.getSex() != null && !StringUtils.isEmpty(searchData.getSex())
+                            && filter.getSex().equals(searchData.getSex()) || filter.getSex().equals("3")) {      //性别过滤
+                        isFirst = true;
+                    } else {
+                        isFirst = false;
+                    }
+                    //年龄过滤
+                    if (isFirst && filter.getAgeStart() > -1 && searchData.getAge() != 0 && searchData.getAge() < filter.getAgeEnd()) {
+                        isFirst = true;
+                    } else {
+                        isFirst = false;
+                    }
+                    if (isFirst && filter.getAgeEnd() > -1 && searchData.getAge() != 0 && searchData.getAge() > filter.getAgeStart()) {
+                        isFirst = true;
+                    } else {
+                        isFirst = false;
+                    }
                 }
-                if (n.getChilds().size() > 0) {
-                    lookChilds("", n, thresholdMap, nodeNamesList);
+                if (isFirst) {
+                    featureRateList.add(featureRate);
                 }
             }
         }
+        return featureRateList;
     }
 
-    private void lookChildsAndCal(Map<String, Float> resultMap, Map<String, Float> thresholdMap, NlpCache.Node node, List<String> delList, String topName) {
-        for (NlpCache.Node n : node.getChilds()) {
-            if (thresholdMap.get(n.getName()) != null) {
-                if (resultMap.get(topName) == null) {
-                    resultMap.put(topName, thresholdMap.get(n.getName()));
-                } else {
-                    resultMap.put(topName, resultMap.get(topName) + thresholdMap.get(n.getName()));
-                }
-                delList.add(n.getName());
-            }
-            if (n.getChilds().size() > 0) {
-                lookChildsAndCal(resultMap, thresholdMap, n, delList, topName);
-            }
-        }
-    }
 }

+ 37 - 41
bigdata-web/src/test/java/org/diagbot/AddStandWordTest.java

@@ -1,8 +1,8 @@
 package org.diagbot;
 
-import org.diagbot.bigdata.common.ApplicationCacheUtil;
-import org.diagbot.bigdata.work.ParamsDataProxy;
-import org.diagbot.common.work.SearchData;
+import org.diagbot.bigdata.work.BigDataParamsProxy;
+import org.diagbot.common.push.bean.SearchData;
+import org.diagbot.common.push.cache.ApplicationCacheUtil;
 import org.diagbot.nlp.participle.ParticipleUtil;
 import org.diagbot.nlp.participle.cfg.Configuration;
 import org.diagbot.nlp.participle.cfg.DefaultConfig;
@@ -11,10 +11,6 @@ import org.diagbot.nlp.participle.word.LexemePath;
 
 import java.io.IOException;
 import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 
 /**
  * @Description:
@@ -47,39 +43,39 @@ public class AddStandWordTest {
         long seconds = endTime - startTime;
         System.out.println("添加标准词使用了:"+splitSeconds + "毫秒.");
         System.out.println("处理文本总共使用了:"+seconds + "毫秒.");*/
-        for (int j = 1; j < 11; j++) {
-
-
-            SearchData searchData = new SearchData();
-            ParamsDataProxy paramsDataProxy = new ParamsDataProxy();
-            searchData.setSymptom("安静时心率98次/分");
-            searchData.setOther("心率156次/分");
-            searchData.setVital("男性43岁");
-            searchData.setLis("Hachinski缺血积分2分");
-            searchData.setPacs("病程9个月");
-            searchData.setDiag("BMI12");
-            String[] items = { searchData.getSymptom(), searchData.getOther(), searchData.getVital()
-                    , searchData.getLis(), searchData.getPacs(), searchData.getDiag() };
-            String[] itemsType = { "symptom", "other", "vital", "lis", "pacs", "diag" };
-            long startTime = System.currentTimeMillis();
-            for (int i = 0; i < items.length; i++) {
-                if (items[i] != null) {
-                    LexemePath<Lexeme> featureData = ParticipleUtil.participle(items[i]);
-                    if (featureData != null) {
-                        paramsDataProxy.addStandWord(featureData, ApplicationCacheUtil.getKl_result_mapping_standword_map(), searchData, itemsType[i]);
-                    }
-                }
-            }
-            System.out.println(searchData.getSymptom());
-            System.out.println(searchData.getOther());
-            System.out.println(searchData.getVital());
-            System.out.println(searchData.getLis());
-            System.out.println(searchData.getPacs());
-            System.out.println(searchData.getDiag());
-            long endTime = System.currentTimeMillis();
-            System.out.println("处理文本总共使用了:" + (endTime - startTime) + "毫秒.");
-            System.out.println("---------------------------第" + j + "次处理---------------------------------");
-        }
+//        for (int j = 1; j < 11; j++) {
+//
+//
+//            SearchData searchData = new SearchData();
+//            BigDataParamsProxy paramsDataProxy = new BigDataParamsProxy();
+//            searchData.setSymptom("安静时心率98次/分");
+//            searchData.setOther("心率156次/分");
+//            searchData.setVital("男性43岁");
+//            searchData.setLis("Hachinski缺血积分2分");
+//            searchData.setPacs("病程9个月");
+//            searchData.setDiag("BMI12");
+//            String[] items = { searchData.getSymptom(), searchData.getOther(), searchData.getVital()
+//                    , searchData.getLis(), searchData.getPacs(), searchData.getDiag() };
+//            String[] itemsType = { "symptom", "other", "vital", "lis", "pacs", "diag" };
+//            long startTime = System.currentTimeMillis();
+//            for (int i = 0; i < items.length; i++) {
+//                if (items[i] != null) {
+//                    LexemePath<Lexeme> featureData = ParticipleUtil.participle(items[i]);
+//                    if (featureData != null) {
+//                        paramsDataProxy.addStandWord(featureData, ApplicationCacheUtil.getKl_result_mapping_standword_map(), searchData, itemsType[i]);
+//                    }
+//                }
+//            }
+//            System.out.println(searchData.getSymptom());
+//            System.out.println(searchData.getOther());
+//            System.out.println(searchData.getVital());
+//            System.out.println(searchData.getLis());
+//            System.out.println(searchData.getPacs());
+//            System.out.println(searchData.getDiag());
+//            long endTime = System.currentTimeMillis();
+//            System.out.println("处理文本总共使用了:" + (endTime - startTime) + "毫秒.");
+//            System.out.println("---------------------------第" + j + "次处理---------------------------------");
+//        }
         //        String s = "\tsafGG\tAFASSADG";
         //        System.out.println(s);
         //        System.out.println(s.trim());
@@ -122,7 +118,7 @@ public class AddStandWordTest {
             int i = 1;
             for (Lexeme lexeme : lexemes) {
                 if (lexeme.getProperty().contains(",")) {
-                    ApplicationCacheUtil.setProterty(lexeme);
+//                    ApplicationCacheUtil.setProterty(lexeme);
                 }
                 if (lexemes.size() != i) {
                     System.out.print(lexeme.getText() + "(" + lexeme.getProperty() + ")|");

+ 135 - 0
bigdata-web/src/test/java/org/diagbot/EyeHospitalData.java

@@ -0,0 +1,135 @@
+package org.diagbot;
+
+import org.diagbot.pub.jdbc.MysqlJdbc;
+
+import java.io.BufferedReader;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.sql.*;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.*;
+import java.util.Date;
+import java.util.stream.Collectors;
+
+/**
+ * Created by louhr on 2019/9/3.
+ */
+public class EyeHospitalData {
+
+    public static void main(String[] args) {
+//        EyeHospitalData ehl = new EyeHospitalData();
+//        Connection conn = ehl.createOracleJdbc();
+//        ehl.queryHospitalInput(conn);
+    }
+
+//    private List<Map<String, Object>> queryHospitalInput(Connection conn) {
+//        PreparedStatement pstmt = null;
+//        ResultSet rs = null;
+//        List<Map<String, Object>> list = new ArrayList<>();
+//        String ipid = "";
+//        try {
+//
+//            DateFormat df = new SimpleDateFormat("yyyyMMdd");
+//
+//            Calendar cal = Calendar.getInstance();
+//            cal.setTime(new Date());
+//            String end_time = df.format(cal.getTime());
+//
+//            cal.add(Calendar.DATE, -7);
+//            String start_time = df.format(cal.getTime());
+//
+//
+//            int record_cnt = 1;
+//
+//            while (start_time.compareTo("20190801") > -1) {
+//                System.out.println(start_time + "..." + end_time);
+//
+//                String sql = "select xml_cont, ipid, pid, dept_name, dept_code, create_time from inpcase.hospital_record " +
+//                        "where substr(create_time, 0, 8) > '" + start_time + "' and substr(create_time, 0, 8) <= '" + end_time + "'";
+//                pstmt = conn.prepareStatement(sql);
+//                //建立一个结果集,用来保存查询出来的结果
+//                rs = pstmt.executeQuery();
+//
+//
+//                while (rs.next()) {
+//                    if (record_cnt % 100 == 0) {
+//                        System.out.println("已查询" + record_cnt + "行数据!");
+//                    }
+//                    Map<String, Object> map = new HashMap<>();
+//                    OracleResultSet ors = (OracleResultSet) rs;
+//                    OPAQUE op = ors.getOPAQUE(1);
+//                    ipid = ors.getString(2);
+//                    String pid = ors.getString(3);
+//                    String dept_name = ors.getString(4);
+//                    String dept_code = ors.getString(5);
+//                    String create_time = ors.getString(6);
+//
+//                    XMLType xml = XMLType.createXML(op);
+//                    String xml_cont = xml.getStringVal();
+//                    xml_cont = xml_cont.substring(xml_cont.indexOf("<text>") + 6, xml_cont.indexOf("</text>"));
+//
+//                    String sex = xml_cont.substring(xml_cont.indexOf("性  别:") + 5, xml_cont.indexOf("性  别:") + 8);
+//                    String age = xml_cont.substring(xml_cont.indexOf("年  龄:") + 5, xml_cont.indexOf("年  龄:") + 8);
+//                    String marry = xml_cont.substring(xml_cont.indexOf("婚  姻:") + 5, xml_cont.indexOf("婚  姻:") + 8);
+//                    String in_hospital = xml_cont.substring(xml_cont.indexOf("入院日期:") + 5, xml_cont.indexOf("入院日期:") + 22);
+//                    String content = xml_cont.substring(xml_cont.indexOf("主  诉:"), xml_cont.indexOf("医师签名:"));
+//
+//                    map.put("ipid", ipid);
+//                    map.put("pid", pid);
+//                    map.put("dept_name", dept_name);
+//                    map.put("dept_code", dept_code);
+//                    map.put("create_time", create_time);
+//                    map.put("sex", sex);
+//                    map.put("age", age);
+//                    map.put("marry", marry);
+//                    map.put("in_hospital", in_hospital);
+//                    map.put("content", content);
+//
+//                    System.out.println(sex);
+//                    System.out.println(age);
+//                    System.out.println(marry);
+//                    System.out.println(in_hospital);
+//                    System.out.println(content);
+//
+//                    list.add(map);
+//
+//                    record_cnt++;
+//                }
+//
+//                end_time = start_time;
+//                cal.add(Calendar.DATE, -7);
+//                start_time = df.format(cal.getTime());
+//            }
+//        } catch (Exception e) {
+//            System.out.println(ipid);
+//            e.printStackTrace();
+//        } finally {
+//            try {
+//                rs.close();
+//                pstmt.close();
+//            }catch (SQLException sqle) {
+//                sqle.printStackTrace();
+//            }
+//        }
+//        return list;
+//    }
+//
+//    private void insertMysql(List<Map<String, Object>> list) {
+//        MysqlJdbc nlpJdbc = new MysqlJdbc("root", "", "jdbc:mysql://127.0.0.1:3306/eye_hospital?useUnicode=true&characterEncoding=UTF-8");
+//        nlpJdbc.insert(list, "hospital_record", new String[]{"ipid", "pid", "dept_name", "dept_code", "create_time", "sex", "age", "marry", "in_hospital", "content"});
+//    }
+//
+//    private Connection createOracleJdbc() {
+//        Connection conn = null;
+//        try {
+//            Class.forName("oracle.jdbc.driver.OracleDriver");
+//            conn = DriverManager.getConnection("jdbc:oracle:thin:@//172.17.1.143:1521/orc1",
+//                    "louhr", "louhr");
+//            return conn;
+//        } catch (Exception e) {
+//            e.printStackTrace();
+//        }
+//        return conn;
+//    }
+}

+ 231 - 0
bigdata-web/src/test/java/org/diagbot/Rule2AppTest.java

@@ -0,0 +1,231 @@
+package org.diagbot;
+
+import jxl.Cell;
+import jxl.Sheet;
+import jxl.Workbook;
+import jxl.read.biff.BiffException;
+import jxl.write.Label;
+import jxl.write.WritableCellFormat;
+import jxl.write.WritableFont;
+import jxl.write.WritableSheet;
+import jxl.write.WritableWorkbook;
+import org.apache.commons.lang3.StringUtils;
+import org.diagbot.pub.jdbc.MysqlJdbc;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @Description:
+ * @Author: HUJING
+ * @Date: 2019/11/21 17:08
+ */
+public class Rule2AppTest {
+    public static void main(String[] args) {
+        Map<String, String> ruleId = getRuleId();
+        String fileNameRead = "D:\\大数据小组\\慢病\\11.病历评级案例-20191126.xls";
+        String fileNameWrite = "D:\\大数据小组\\慢病\\病历评级规则app列表20191126.xls";
+        readExcel(fileNameRead);
+        writeExcel(ruleId, readExcel(fileNameRead), fileNameWrite);
+    }
+
+    public static void writeExcel(Map<String, String> ruleId, List<List<String>> ruleList, String fileName) {
+        String[] title = { "编码", "是否删除", "记录创建时间", "记录修改时间", "创建人", "修改人", "指标名称", "规则类别", "提醒内容" };
+        File file = new File(fileName);
+        if (file.exists()) {
+            //如果文件存在就删除
+            file.delete();
+        }
+        try {
+            file.createNewFile();
+            WritableFont titleFont = new WritableFont(WritableFont.createFont("宋体"), 10, WritableFont.BOLD);
+            WritableFont contentFont = new WritableFont(WritableFont.createFont("宋体"), 10);
+            WritableCellFormat format = new WritableCellFormat(titleFont);
+            //创建工作簿
+            WritableWorkbook workbookA = Workbook.createWorkbook(file);
+            //创建sheet
+            WritableSheet sheetA = workbookA.createSheet("sheet1", 0);
+            Label labelA = null;
+            //设置列名
+            for (int j = 0; j < title.length; j++) {
+                labelA = new Label(j, 0, title[j], format);
+                sheetA.addCell(labelA);
+            }
+            int row = 1;
+            format = new WritableCellFormat(contentFont);
+            for (int i = 0; i < ruleList.size(); i++) {
+                List<String> rule = ruleList.get(i);
+                if (rule.size() == 2 || rule.size() == 3) {
+                    String[] pacsOrder = rule.get(0).split("、");
+                    String[] rules = rule.get(1).split("、");
+                    for (int j = 0; j < pacsOrder.length; j++) {
+                        String content = "";
+                        String remind = "";
+                        for (int k = 0; k < rules.length; k++) {
+                            /** 原规则app样式
+                             * labelA = new Label(6, row, ruleId.get(pacsOrder[j]) + "," + ruleId.get(rules[k]));
+                             * sheetA.addCell(labelA);
+                             * labelA = new Label(8, row, rules[k] + ",不宜做" + pacsOrder[j]);
+                             * sheetA.addCell(labelA);
+                             * row++;*/
+                            String feature = ruleId.get(rules[k]) != null ? ruleId.get(rules[k]) : rules[k];
+                            if (rules[k].contains("血小板计数(PLT)")){
+                                feature = "1131";
+                            } else if (rules[k].contains("经期")){
+                                feature = "70、71";
+                            } else if (rules[k].contains("妊娠")){
+                                feature = "1164、1165";
+                                remind = rule.size()>2?rule.get(2):"妊娠不宜做此项检查";
+                            } else if (rules[k].contains("妊娠1-3个月")){
+                                feature = "1166、1167、1168、1169";
+//                                remind = rule.size()>2?rule.get(2):"妊娠不宜做此项检查";
+                            } else if (rules[k].contains("男性")){
+                                remind = rule.size()>2?rule.get(2):"男性不宜做此项检查";
+                            } else if (rules[k].contains("女性")){
+                                remind = rule.size()>2?rule.get(2):"女性不宜做此项检查";
+                            }
+
+                            String[] featureArr = feature.split("、");
+                            for (int l = 0; l < featureArr.length; l++) {
+                                if (k == rules.length - 1 && l == featureArr.length - 1) {
+                                    content += ruleId.get(pacsOrder[j]) + "," + featureArr[l];
+                                } else {
+                                    content += ruleId.get(pacsOrder[j]) + "," + featureArr[l] + "^";
+                                }
+                            }
+                        }
+//                        write2DB(content,StringUtils.isEmpty(remind)?"请注意:该患者${remind}":remind);
+                        labelA = new Label(6, row, content,format);
+                        sheetA.addCell(labelA);
+                        labelA = new Label(8, row, StringUtils.isEmpty(remind)?"请注意:该患者${remind}":remind,format);
+                        sheetA.addCell(labelA);
+                        row++;
+                    }
+                }
+                //                } else if (rule.size() == 3){
+                //                    String[] rules = rule.get(1).split("、");
+                //                    for (int j = 0; j < rules.length; j++) {
+                //                        labelA = new Label(6, row, ruleId.get(rule.get(0))+","+ruleId.get(rules[j]));
+                //                        sheetA.addCell(labelA);
+                //                        labelA = new Label(8, row, rule.get(2));
+                //                        sheetA.addCell(labelA);
+                //                        row++;
+                //                    }
+                //                }
+            }
+            System.out.println("成功写入文件,请前往" + fileName + "查看文件!");
+            workbookA.write();  //写入数据
+            workbookA.close();
+        } catch (Exception e) {
+            e.printStackTrace();
+            System.out.println("文件写入失败,报异常...");
+        }
+    }
+
+    public static List<List<String>> readExcel(String fileName) {
+        List<List<String>> ruleList = new ArrayList<>();
+        List<String> rule = null;
+        FileInputStream fis = null;
+        try {
+            fis = new FileInputStream(new File(fileName));
+            Workbook rwb = Workbook.getWorkbook(fis);
+            Sheet[] sheet = rwb.getSheets();
+//            for (int i = 0; i < sheet.length; i++) {
+                Sheet rs = rwb.getSheet(0);
+                for (int j = 1; j < rs.getRows(); j++) {
+                    rule = new ArrayList<>();
+                    String content = "";
+                    Cell[] cells = rs.getRow(j);
+                    for (int k = 0; k < cells.length; k++) {
+                        if (k == 0 || k == 2 || k == 3) {
+                            content = cells[k].getContents();
+                            rule.add(content.trim());
+                        }
+                    }
+                    ruleList.add(rule);
+                }
+//            }
+            fis.close();
+        } catch (FileNotFoundException e) {
+            e.printStackTrace();
+        } catch (IOException e) {
+            e.printStackTrace();
+        } catch (BiffException e) {
+            e.printStackTrace();
+        }
+        return ruleList;
+    }
+
+    public static void write2DB(String ruleId,String remind){
+        MysqlJdbc jdbc = new MysqlJdbc( "root", "diagbot@20180822","jdbc:mysql://192.168.2.235:3306/med-s?useUnicode=true&characterEncoding=UTF-8");
+        Connection conn = jdbc.connect();
+        PreparedStatement pstUpdate = null;
+        String sql = "INSERT INTO kl_rule_app(rule_id,type_id,remind) VALUES (?,?,?)";
+
+        try {
+            pstUpdate = conn.prepareStatement(sql);
+            pstUpdate.setString(1,ruleId);
+            pstUpdate.setString(2,"2");
+            pstUpdate.setString(3,remind);
+            System.out.println(ruleId + "\t" + remind);
+            pstUpdate.executeUpdate();
+            pstUpdate.close();
+            conn.close();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+
+    }
+
+    public static Map<String, String> getRuleId() {
+        Map<String, String> ruleId = new HashMap<>();
+        MysqlJdbc jdbc = new MysqlJdbc( "root", "diagbot@20180822","jdbc:mysql://192.168.2.235:3306/med-s?useUnicode=true&characterEncoding=UTF-8");
+        Connection conn = jdbc.connect();
+        PreparedStatement pstQuery = null;
+        ResultSet rs = null;
+        String sql = "SELECT id,eq_value FROM kl_rule_pub WHERE eq_value is not null";
+        try {
+            pstQuery = conn.prepareStatement(sql);
+            rs = pstQuery.executeQuery();
+            while (rs.next()) {
+                String id = String.valueOf(rs.getInt(1));
+                String eqValue = rs.getString(2).trim();
+                ruleId.put(eqValue, id);
+            }
+            ruleId.put("收缩压≥180mmHg", "238");
+            ruleId.put("舒张压≥110mmHg", "239");
+            ruleId.put("收缩压≤80mmHg", "29");
+            ruleId.put("舒张压≤50mmHg", "30");
+            ruleId.put("收缩压≥140mmHg", "273");
+            ruleId.put("舒张压≥90mmHg", "274");
+            ruleId.put("体温>39.1℃", "272");
+            ruleId.put("体温>37.3℃", "1123");
+            ruleId.put("凝血酶原时间(PT)对照>5s", "1132");
+            ruleId.put("活化部分凝血活酶(APTT)对照>10s", "1133");
+            ruleId.put("T7至L3椎体MR增强", "1648");
+//            ruleId.put("体温>39.0℃", "743");
+            ruleId.put("心率<60次/分", "1128");
+            ruleId.put("心率≥120次/分", "28");
+            ruleId.put("肾病综合征+胸腔积液", "1211,1140");
+            ruleId.put("肝硬化+胸腔积液", "1212,1140");
+            ruleId.put("心力衰竭+胸腔积液", "60,1140");
+            ruleId.put("低蛋白血症+胸腔积液", "1213,1140");
+            rs.close();
+            pstQuery.close();
+            conn.close();
+        } catch (SQLException e) {
+            e.printStackTrace();
+        }
+        return ruleId;
+    }
+}

+ 191 - 0
bigdata-web/src/test/java/org/diagbot/RuleTest.java

@@ -0,0 +1,191 @@
+package org.diagbot;
+
+import org.diagbot.common.push.bean.Rule;
+import org.diagbot.common.push.bean.SearchData;
+import org.diagbot.common.push.filter.rule.PretreatmentRule;
+import org.diagbot.nlp.rule.module.PreResult;
+import org.diagbot.pub.jdbc.MysqlJdbc;
+import org.springframework.util.StringUtils;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Created by louhr on 2019/11/22.
+ */
+public class RuleTest {
+    public static void main(String[] args) {
+        RuleTest test = new RuleTest();
+        try {
+            test.validatePub();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    private void validatePub() throws Exception {
+        MysqlJdbc nlpJdbc = new MysqlJdbc("root", "lantone", "jdbc:mysql://192.168.2.236:3306/med?useUnicode=true&characterEncoding=UTF-8");
+        List<Map<String, String>> data = nlpJdbc.query("kl_rule_pub",
+                new String[]{"id", "pub_name", "min_operator", "min_value", "min_unit",
+                        "max_operator", "max_value", "max_unit", "eq_operator", "eq_value", "eq_unit", "remind", "suffix_info"}, "");
+
+        List<Map<String, Object>> updates = new ArrayList<>();
+        List<Map<String, Object>> wheres = new ArrayList<>();
+        //数据预处理
+        PretreatmentRule pretreatmentRule = new PretreatmentRule();
+        for (Map<String, String> map : data) {
+            SearchData searchData = new SearchData();
+            searchData.setSysCode("1");
+            createSearchData(searchData, map);
+
+            Map<String, Object> line = new HashMap<>();
+            Map<String, Object> where = new HashMap<>();
+
+            pretreatmentRule.rule(searchData);
+            Map<String, List<Rule>> rules = searchData.getRules();
+            for (Map.Entry<String, List<Rule>> entry : rules.entrySet()) {
+                if (entry.getValue().size() > 0) {
+                    line.put("rule_type", "9");
+                    where.put("id", map.get("id"));
+
+                    updates.add(line);
+                    wheres.add(where);
+                    break;
+                }
+            }
+        }
+
+        nlpJdbc.update("kl_rule_pub", updates, wheres);
+    }
+
+    private void validateApp() throws Exception {
+        MysqlJdbc nlpJdbc = new MysqlJdbc("root", "diagbot@20180822", "jdbc:mysql://192.168.2.235:3306/med-s?useUnicode=true&characterEncoding=UTF-8");
+        List<Map<String, String>> data = nlpJdbc.query("kl_rule_pub",
+                new String[]{"id", "pub_name", "min_operator", "min_value", "min_unit",
+                        "max_operator", "max_value", "max_unit", "eq_operator", "eq_value", "eq_unit", "remind", "suffix_info"}, " where pub_name like '%体征%'");
+
+        Map<String, Map<String, String>> rulesMap = new HashMap<>();
+        for (Map<String, String> map : data) {
+            rulesMap.put(map.get("id"), map);
+        }
+
+        data = nlpJdbc.query("kl_rule_app", new String[]{"id", "rule_id"}, "");
+        for (Map<String, String> map : data) {
+
+        }
+
+        List<Map<String, Object>> updates = new ArrayList<>();
+        List<Map<String, Object>> wheres = new ArrayList<>();
+    }
+
+    private SearchData createSearchData(SearchData searchData, Map<String, String> map) {
+        double value = 0.0;
+        if (!StringUtils.isEmpty(map.get("eq_operator")) && "=".equals(map.get("eq_operator"))) {
+            if ("诊断--".equals(map.get("pub_name"))) {
+                searchData.setDiagString(map.get("eq_value"));
+            }
+            if ("体征--".equals(map.get("pub_name"))) {
+                searchData.setVital(map.get("eq_value"));
+            }
+            if ("开单--".equals(map.get("pub_name"))) {
+                List<PreResult> results = new ArrayList<>();
+                PreResult preResult = new PreResult();
+                preResult.setValue(map.get("eq_value"));
+                results.add(preResult);
+                searchData.setLisOrder(results);
+                searchData.setPacsOrder(results);
+            }
+            if ("症状--".equals(map.get("pub_name"))) {
+                searchData.setChief(map.get("eq_value"));
+            }
+            if ("药品--".equals(map.get("pub_name"))) {
+                searchData.setDrugString(map.get("eq_value"));
+            }
+            if ("既往--".equals(map.get("pub_name"))) {
+                searchData.setPasts(map.get("eq_value"));
+            }
+            if ("过敏--".equals(map.get("pub_name"))) {
+                searchData.setAllergy(map.get("eq_value"));
+            }
+            if (map.get("pub_name").indexOf("其他--") > -1) {
+                searchData.setOther(map.get("pub_name") + map.get("eq_value"));
+            }
+            if (map.get("pub_name").indexOf("检查--") > -1) {
+                searchData.setPacsString(map.get("pub_name") + map.get("eq_value"));
+            }
+        } else if (!StringUtils.isEmpty(map.get("min_operator")) && !StringUtils.isEmpty(map.get("max_operator"))) {
+            if (map.get("pub_name").indexOf("体征--") > -1) {
+                value = Double.valueOf(map.get("min_value")) + 0.1;
+                searchData.setVital(map.get("pub_name") + value + map.get("min_unit"));
+            }
+            if (map.get("pub_name").indexOf("其他--") > -1) {
+                value = Double.valueOf(map.get("min_value")) + 0.1;
+                searchData.setOther(map.get("pub_name") + value + map.get("min_unit"));
+            }
+            if (map.get("pub_name").indexOf("化验--") > -1) {
+                value = Double.valueOf(map.get("min_value")) + 0.1;
+                List<PreResult> list = new ArrayList<>();
+                PreResult preResult = new PreResult();
+                preResult.setUniqueName(map.get("pub_name").substring(4));
+                preResult.setValue(String.valueOf(value));
+                preResult.setUnits(map.get("min_unit"));
+                list.add(preResult);
+                searchData.setLis(list);
+            }
+            if (map.get("pub_name").indexOf("年龄--") > -1) {
+                int v = Integer.valueOf(map.get("min_value")) + 1;
+                searchData.setAge(v);
+            }
+        } else if (!StringUtils.isEmpty(map.get("min_operator"))) {
+            if (map.get("pub_name").indexOf("体征--") > -1) {
+                value = Double.valueOf(map.get("min_value")) - 0.1;
+                searchData.setVital(map.get("pub_name") + value + map.get("min_unit"));
+            }
+            if (map.get("pub_name").indexOf("其他--") > -1) {
+                value = Double.valueOf(map.get("min_value")) - 0.1;
+                searchData.setOther(map.get("pub_name") + value + map.get("min_unit"));
+            }
+            if (map.get("pub_name").indexOf("化验--") > -1) {
+                value = Double.valueOf(map.get("min_value")) - 0.1;
+                List<PreResult> list = new ArrayList<>();
+                PreResult preResult = new PreResult();
+                preResult.setUniqueName(map.get("pub_name").substring(4));
+                preResult.setValue(String.valueOf(value));
+                preResult.setUnits(map.get("min_unit"));
+                list.add(preResult);
+                searchData.setLis(list);
+            }
+            if (map.get("pub_name").indexOf("年龄--") > -1) {
+                int v = Integer.valueOf(map.get("min_value")) - 1;
+                searchData.setAge(v);
+            }
+        } else if (!StringUtils.isEmpty(map.get("max_operator"))) {
+            if (map.get("pub_name").indexOf("体征--") > -1) {
+                value = Double.valueOf(map.get("max_value")) + 0.1;
+                searchData.setVital(map.get("pub_name") + value + map.get("max_unit"));
+            }
+            if (map.get("pub_name").indexOf("其他--") > -1) {
+                value = Double.valueOf(map.get("max_value")) + 0.1;
+                searchData.setOther(map.get("pub_name") + value + map.get("max_unit"));
+            }
+            if (map.get("pub_name").indexOf("化验--") > -1) {
+                value = Double.valueOf(map.get("max_value")) + 0.1;
+                List<PreResult> list = new ArrayList<>();
+                PreResult preResult = new PreResult();
+                preResult.setUniqueName(map.get("pub_name").substring(4));
+                preResult.setValue(String.valueOf(value));
+                preResult.setUnits(map.get("max_unit"));
+                list.add(preResult);
+                searchData.setLis(list);
+            }
+            if (map.get("pub_name").indexOf("年龄--") > -1) {
+                int v = Integer.valueOf(map.get("max_value")) + 1;
+                searchData.setAge(v);
+            }
+        }
+
+        return searchData;
+    }
+}

+ 2 - 2
common-push/pom.xml

@@ -22,11 +22,11 @@
             <artifactId>public</artifactId>
             <version>${project.version}</version>
         </dependency>
+
         <dependency>
             <groupId>org.diagbot</groupId>
-            <artifactId>graph</artifactId>
+            <artifactId>nlp</artifactId>
             <version>1.0.0</version>
-            <scope>compile</scope>
         </dependency>
     </dependencies>
 

+ 0 - 11
common-push/src/main/java/org/diagbot/common/push/Test.java

@@ -1,11 +0,0 @@
-package org.diagbot.common.push;
-
-/**
- * @ClassName org.diagbot.common.push.Test
- * @Description TODO
- * @Author fyeman
- * @Date 2019/8/5/005 17:07
- * @Version 1.0
- **/
-public class Test {
-}

+ 43 - 0
common-push/src/main/java/org/diagbot/common/push/bean/CrisisDetail.java

@@ -0,0 +1,43 @@
package org.diagbot.common.push.bean;

/**
 * Detail payload for a single crisis (critical-value) reminder pushed to
 * the client.
 *
 * Created by louhr on 2019/8/31.
 */
public class CrisisDetail {
    /** Reminder text shown to the clinician. */
    private String remindText;
    /** Source text fragment that triggered the reminder; empty, never null. */
    private String originText = "";
    /** Reminder type identifier. */
    private String typeId;
    /** Item name as known by the HIS; empty, never null. */
    private String hisName = "";

    public String getRemindText() { return remindText; }

    public void setRemindText(String remindText) { this.remindText = remindText; }

    public String getOriginText() { return originText; }

    public void setOriginText(String originText) { this.originText = originText; }

    public String getTypeId() { return typeId; }

    public void setTypeId(String typeId) { this.typeId = typeId; }

    public String getHisName() { return hisName; }

    public void setHisName(String hisName) { this.hisName = hisName; }
}

+ 1 - 1
common-service/src/main/java/org/diagbot/common/work/FeatureRate.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.work;
+package org.diagbot.common.push.bean;
 
 /**
  * Created by fyeman on 2018/1/17.

+ 1 - 1
common-service/src/main/java/org/diagbot/common/work/LisDetail.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.work;
+package org.diagbot.common.push.bean;
 
 public class LisDetail {
     private String detailName;

+ 63 - 0
common-push/src/main/java/org/diagbot/common/push/bean/RelevantFeature.java

@@ -0,0 +1,63 @@
package org.diagbot.common.push.bean;

/**
 * Row bean for a diagnosis-relevant feature record.
 * Accessor names keep the original snake_case spelling because callers
 * (and any bean-mapping by property name) depend on them.
 *
 * @Description:
 * @author: wangyu
 * @time: 2019/10/14 16:36
 */
public class RelevantFeature {
    /** Primary key. */
    private String id;
    /** Diagnosis name this feature belongs to. */
    private String diagnose;
    /** Feature (symptom/lab/etc.) name. */
    private String feature;
    /** Feature category code. */
    private String featureType;
    /** Value category code. */
    private String valueType;
    /** Flag: feature points to a suspected diagnosis. */
    private String findSuspectDiagnose;

    public String getId() { return id; }

    public void setId(String id) { this.id = id; }

    public String getDiagnose() { return diagnose; }

    public void setDiagnose(String diagnose) { this.diagnose = diagnose; }

    public String getFeature() { return feature; }

    public void setFeature(String feature) { this.feature = feature; }

    public String getFeature_type() { return featureType; }

    public void setFeature_type(String feature_type) { this.featureType = feature_type; }

    public String getValue_type() { return valueType; }

    public void setValue_type(String value_type) { this.valueType = value_type; }

    public String getFind_suspect_diagnose() { return findSuspectDiagnose; }

    public void setFind_suspect_diagnose(String find_suspect_diagnose) { this.findSuspectDiagnose = find_suspect_diagnose; }
}

+ 41 - 12
common-service/src/main/java/org/diagbot/common/work/ResponseData.java

@@ -1,7 +1,8 @@
-package org.diagbot.common.work;
-import com.alibaba.fastjson.JSONObject;
-import org.diagbot.common.javabean.Filnlly;
-import org.diagbot.common.javabean.MedicalIndication;
+package org.diagbot.common.push.bean;
+
+import org.diagbot.common.push.bean.neo4j.Filnlly;
+import org.diagbot.common.push.bean.neo4j.MedicalIndication;
+import org.diagbot.common.push.bean.neo4j.Treat;
 
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -23,13 +24,17 @@ public class ResponseData {
 
     private List<FeatureRate> beforeCombineDis = new ArrayList<>(10);
 
-    private Map<String, Filnlly> treat = new HashMap<>();
+//    private Map<String, Filnlly> treat = new HashMap<>();
+    private Treat treat;
     private List<FeatureRate> graphWords = new ArrayList<>(10);
     private List<MedicalIndication> medicalIndications;//量表和指标推送
 //    private Map<String,JSONObject> managementEvaluation; //管理评估
     private Map managementEvaluation;
     private List<String> diffDiag;//鉴别诊断
     private List<String> excludeDiag; //排除诊断
+    private String hasIndications = "0";  //是否有病情提示标识 包括危急值 开单合理项 默认无
+
+    private Map<String, List<CrisisDetail>> crisisDetails = new HashMap<>();
 
     public List<String> getDiffDiag() {
         return diffDiag;
@@ -63,6 +68,14 @@ public class ResponseData {
 //        this.managementEvaluation = managementEvaluation;
 //    }
 
+    public Treat getTreat() {
+        return treat;
+    }
+
+    public void setTreat(Treat treatObject) {
+        this.treat = treatObject;
+    }
+
     public List<MedicalIndication> getMedicalIndications() {
         return medicalIndications;
     }
@@ -137,13 +150,13 @@ public class ResponseData {
         this.inputs = inputs;
     }
 
-    public Map<String, Filnlly> getTreat() {
-        return treat;
-    }
-
-    public void setTreat(Map<String, Filnlly> treat) {
-        this.treat = treat;
-    }
+//    public Map<String, Filnlly> getTreat() {
+//        return treat;
+//    }
+//
+//    public void setTreat(Map<String, Filnlly> treat) {
+//        this.treat = treat;
+//    }
 
     public List<FeatureRate> getHistory() {
         return history;
@@ -160,4 +173,20 @@ public class ResponseData {
     public void setBeforeCombineDis(List<FeatureRate> beforeCombineDis) {
         this.beforeCombineDis = beforeCombineDis;
     }
+
+    public Map<String, List<CrisisDetail>> getCrisisDetails() {
+        return crisisDetails;
+    }
+
+    public void setCrisisDetails(Map<String, List<CrisisDetail>> crisisDetails) {
+        this.crisisDetails = crisisDetails;
+    }
+
+    public String getHasIndications() {
+        return hasIndications;
+    }
+
+    public void setHasIndications(String hasIndications) {
+        this.hasIndications = hasIndications;
+    }
 }

+ 1 - 1
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingFilter.java

@@ -1,4 +1,4 @@
-package org.diagbot.bigdata.dao.model;
+package org.diagbot.common.push.bean;
 
 
 public class ResultMappingFilter {

+ 163 - 0
common-push/src/main/java/org/diagbot/common/push/bean/Rule.java

@@ -0,0 +1,163 @@
package org.diagbot.common.push.bean;

/**
 * 触发规则 — one row of the kl_rule_pub trigger-rule table.
 *
 * NOTE: the original class was annotated with Lombok {@code @Getter}/{@code @Setter}
 * while ALSO hand-writing every accessor; Lombok skips fields that already
 * have accessors, so the annotations generated nothing and only added a
 * build-time dependency. They (and their imports) are removed here; the
 * public API is unchanged.
 *
 * @author Mark Huang
 * @since 27/08/2019
 */
public class Rule {
    private String id = "";
    // 大类名称
    private String pub_name = "";
    // 最小值比较符
    private String min_operator = "";
    // 最小值
    private String min_value = "";
    // 最小值单位
    private String min_unit = "";
    // 最大值比较符
    private String max_operator = "";
    // 最大值
    private String max_value = "";
    // 最大值单位
    private String max_unit = "";
    // 标准值 用作等于
    private String eq_operator = "";
    // 标准值
    private String eq_value = "";
    // 标准值单位
    private String eq_unit = "";
    //提醒信息
    private String remind = "";
    //追加到文本尾部信息
    private String suffixInfo = "";
    //提醒信息
    private String originText = "";
    //his名称
    private String hisName = "";

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getPub_name() {
        return pub_name;
    }

    public void setPub_name(String pub_name) {
        this.pub_name = pub_name;
    }

    public String getMin_operator() {
        return min_operator;
    }

    public void setMin_operator(String min_operator) {
        this.min_operator = min_operator;
    }

    public String getMin_value() {
        return min_value;
    }

    public void setMin_value(String min_value) {
        this.min_value = min_value;
    }

    public String getMin_unit() {
        return min_unit;
    }

    public void setMin_unit(String min_unit) {
        this.min_unit = min_unit;
    }

    public String getMax_operator() {
        return max_operator;
    }

    public void setMax_operator(String max_operator) {
        this.max_operator = max_operator;
    }

    public String getMax_value() {
        return max_value;
    }

    public void setMax_value(String max_value) {
        this.max_value = max_value;
    }

    public String getMax_unit() {
        return max_unit;
    }

    public void setMax_unit(String max_unit) {
        this.max_unit = max_unit;
    }

    public String getEq_operator() {
        return eq_operator;
    }

    public void setEq_operator(String eq_operator) {
        this.eq_operator = eq_operator;
    }

    public String getEq_value() {
        return eq_value;
    }

    public void setEq_value(String eq_value) {
        this.eq_value = eq_value;
    }

    public String getEq_unit() {
        return eq_unit;
    }

    public void setEq_unit(String eq_unit) {
        this.eq_unit = eq_unit;
    }

    public String getRemind() {
        return remind;
    }

    public void setRemind(String remind) {
        this.remind = remind;
    }

    public String getSuffixInfo() {
        return suffixInfo;
    }

    public void setSuffixInfo(String suffixInfo) {
        this.suffixInfo = suffixInfo;
    }

    public String getOriginText() {
        return originText;
    }

    public void setOriginText(String originText) {
        this.originText = originText;
    }

    public String getHisName() {
        return hisName;
    }

    public void setHisName(String hisName) {
        this.hisName = hisName;
    }
}

+ 45 - 0
common-push/src/main/java/org/diagbot/common/push/bean/RuleApp.java

@@ -0,0 +1,45 @@
package org.diagbot.common.push.bean;

/**
 * Row bean for the kl_rule_app table: binds a composite rule-id key to a
 * reminder of a given type.
 *
 * @Description:
 * @Author: HUJING
 * @Date: 2019/9/3 13:21
 */
public class RuleApp {
    /** Primary key. */
    private String id;
    /** Composite rule ids this app entry is triggered by. */
    private String ruleIds;
    /** Reminder type identifier. */
    private String typeId;
    /** Reminder text shown when the rule fires. */
    private String remind;

    public String getId() { return id; }

    public void setId(String id) { this.id = id; }

    public String getRuleIds() { return ruleIds; }

    public void setRuleIds(String ruleIds) { this.ruleIds = ruleIds; }

    public String getTypeId() { return typeId; }

    public void setTypeId(String typeId) { this.typeId = typeId; }

    public String getRemind() { return remind; }

    public void setRemind(String remind) { this.remind = remind; }
}

+ 509 - 0
common-push/src/main/java/org/diagbot/common/push/bean/SearchData.java

@@ -0,0 +1,509 @@
+package org.diagbot.common.push.bean;
+
+import org.diagbot.nlp.rule.module.PreResult;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Aggregated request payload for the push/search services: patient demographics,
+ * clinical free-text sections, structured LIS/PACS/diagnosis/drug inputs, current
+ * order items, and the algorithm/rule routing flags.
+ *
+ * Fixes vs. the original: removed the stray double semicolon on the diseaseName
+ * initializer and normalized inconsistent spacing; no behavioral change.
+ */
+public class SearchData {
+    protected int length = 10;
+    protected int age_start = 0;
+    protected int age_end = 200;
+    protected int age = 0;
+    protected String sex;
+    // marital status
+    protected String marriage;
+    // Bayes threshold applied to search results (stored as String, read as float — see getThreshold)
+    protected String threshold = "0";
+    // feature category
+    protected String featureType;
+    // diagnosis category
+    protected Integer disType; // 0: follow-up visit, 1: emergency
+    // featureType split on ","
+    protected String[] featureTypes;
+    // outpatient / inpatient classification
+    protected String resourceType;
+    // external system code; when sysCode is null or empty, kl_standard_info standard names are returned
+    protected String sysCode;
+
+    protected String normal = "";
+    protected String chief = "";
+    protected String symptom = "";
+    protected String vital = "";
+    protected String pasts = "";
+    protected String other = "";
+    // infectious disease history
+    protected String infectious = "";
+    // surgery / trauma history
+    protected String operation = "";
+    // allergy history
+    protected String allergy = "";
+    // vaccination history
+    protected String vaccination = "";
+    // personal history
+    protected String personal = "";
+    // marital & childbearing history
+    protected String marital = "";
+    // family history
+    protected String family = "";
+    // menstrual history
+    protected String menstrual = "";
+    // lab (LIS) free-text input
+    protected String lisString = "";
+    // PACS free-text input
+    protected String pacsString = "";
+    // diagnosis free-text input
+    protected String diagString = "";
+    // drug free-text input
+    protected String drugString = "";
+    // structured LIS data
+    private List<PreResult> lis = new ArrayList<>();
+    // structured PACS data
+    private List<PreResult> pacs = new ArrayList<>();
+    // structured diagnosis data
+    private List<PreResult> diag = new ArrayList<>();
+    // structured drug data
+    private List<PreResult> drug = new ArrayList<>();
+    // LIS items on the current order
+    protected List<PreResult> lisOrder = new ArrayList<>();
+    // PACS items on the current order
+    protected List<PreResult> pacsOrder = new ArrayList<>();
+    // diagnosis items on the current order
+    protected List<PreResult> diagOrder = new ArrayList<>();
+    // drug items on the current order
+    protected List<PreResult> drugOrder = new ArrayList<>();
+    // operation items on the current order
+    protected List<PreResult> operationOrder = new ArrayList<>();
+    // other order items (reserved)
+    protected List<PreResult> otherOrder = new ArrayList<>();
+    // diagnoses pushed by the big-data model
+    protected List<FeatureRate> pushDiags = new ArrayList<>();
+    // selected diagnosis (original had a stray ";;" here)
+    private PreResult diseaseName = new PreResult();
+    // scale (assessment form) name
+    protected String scaleName = "";
+    // indicator results
+    protected String indications = "";
+    // model classify value
+    protected String algorithmClassifyValue;
+    // push conditions
+    private Map<String, Map<String, String>> inputs = new HashMap<>(10, 0.8f);
+    // knowledge-graph push conditions
+    private Map<String, Map<String, String>> graphInputs = new HashMap<>(10, 0.8f);
+    // negative findings: page-entered data the results must be filtered against
+    private Map<String, Map<String, String>> filters = new HashMap<>(10, 0.8f);
+    // rules that were satisfied (original comment: "set of matched rule IDs") — confirm key semantics
+    private Map<String, List<Rule>> rules = new HashMap<>();
+    // algorithm used for feature push: 1 = machine learning, 2 = naive Bayes
+    private Integer algorithmType;
+    // rule type: 1 critical value alert, 2 order rationality, 3 management evaluation,
+    // 4 adverse reaction, 5 drug recommendation, 6 abnormal value
+    private String ruleType;
+
+    public Integer getDisType() {
+        return disType;
+    }
+
+    public void setDisType(Integer disType) {
+        this.disType = disType;
+    }
+
+    public int getLength() {
+        return length;
+    }
+
+    public void setLength(int length) {
+        this.length = length;
+    }
+
+    public int getAge_start() {
+        return age_start;
+    }
+
+    public void setAge_start(int age_start) {
+        this.age_start = age_start;
+    }
+
+    public int getAge_end() {
+        return age_end;
+    }
+
+    public void setAge_end(int age_end) {
+        this.age_end = age_end;
+    }
+
+    public int getAge() {
+        return age;
+    }
+
+    public void setAge(int age) {
+        this.age = age;
+    }
+
+    public String getSex() {
+        return sex;
+    }
+
+    public void setSex(String sex) {
+        this.sex = sex;
+    }
+
+    public String getFeatureType() {
+        return featureType;
+    }
+
+    public void setFeatureType(String featureType) {
+        this.featureType = featureType;
+    }
+
+    public String[] getFeatureTypes() {
+        return featureTypes;
+    }
+
+    public void setFeatureTypes(String[] featureTypes) {
+        this.featureTypes = featureTypes;
+    }
+
+    public String getResourceType() {
+        return resourceType;
+    }
+
+    public void setResourceType(String resourceType) {
+        this.resourceType = resourceType;
+    }
+
+    public String getSysCode() {
+        return sysCode;
+    }
+
+    public void setSysCode(String sysCode) {
+        this.sysCode = sysCode;
+    }
+
+    public void setThreshold(String threshold) {
+        this.threshold = threshold;
+    }
+
+    // NOTE: asymmetric with the setter (String in, float out); throws NumberFormatException
+    // if a non-numeric threshold was set.
+    public float getThreshold() { return Float.parseFloat(threshold); }
+
+    public Map<String, Map<String, String>> getInputs() {
+        return inputs;
+    }
+
+    public void setInputs(Map<String, Map<String, String>> inputs) {
+        this.inputs = inputs;
+    }
+
+    public String getNormal() {
+        return normal;
+    }
+
+    public void setNormal(String normal) {
+        this.normal = normal;
+    }
+
+    public String getChief() {
+        return chief;
+    }
+
+    public void setChief(String chief) {
+        this.chief = chief;
+    }
+
+    public String getSymptom() {
+        return symptom;
+    }
+
+    public void setSymptom(String symptom) {
+        this.symptom = symptom;
+    }
+
+    public String getVital() {
+        return vital;
+    }
+
+    public void setVital(String vital) {
+        this.vital = vital;
+    }
+
+    public String getPasts() {
+        return pasts;
+    }
+
+    public void setPasts(String pasts) {
+        this.pasts = pasts;
+    }
+
+    public String getOther() {
+        return other;
+    }
+
+    public void setOther(String other) {
+        this.other = other;
+    }
+
+    public String getScaleName() {
+        return scaleName;
+    }
+
+    public void setScaleName(String scaleName) {
+        this.scaleName = scaleName;
+    }
+
+    public String getAlgorithmClassifyValue() {
+        return algorithmClassifyValue;
+    }
+
+    public void setAlgorithmClassifyValue(String algorithmClassifyValue) {
+        this.algorithmClassifyValue = algorithmClassifyValue;
+    }
+
+    public Map<String, Map<String, String>> getFilters() {
+        return filters;
+    }
+
+    public void setFilters(Map<String, Map<String, String>> filters) {
+        this.filters = filters;
+    }
+
+    public List<FeatureRate> getPushDiags() {
+        return pushDiags;
+    }
+
+    public void setPushDiags(List<FeatureRate> pushDiags) {
+        this.pushDiags = pushDiags;
+    }
+
+    public String getIndications() {
+        return indications;
+    }
+
+    public void setIndications(String indications) {
+        this.indications = indications;
+    }
+
+    public Map<String, Map<String, String>> getGraphInputs() {
+        return graphInputs;
+    }
+
+    public void setGraphInputs(Map<String, Map<String, String>> graphInputs) {
+        this.graphInputs = graphInputs;
+    }
+
+    public Map<String, List<Rule>> getRules() {
+        return rules;
+    }
+
+    public void setRules(Map<String, List<Rule>> rules) {
+        this.rules = rules;
+    }
+
+    public Integer getAlgorithmType() {
+        return algorithmType;
+    }
+
+    public void setAlgorithmType(Integer algorithmType) {
+        this.algorithmType = algorithmType;
+    }
+
+    public String getRuleType() {
+        return ruleType;
+    }
+
+    public void setRuleType(String ruleType) {
+        this.ruleType = ruleType;
+    }
+
+    public String getMarriage() {
+        return marriage;
+    }
+
+    public void setMarriage(String marriage) {
+        this.marriage = marriage;
+    }
+
+    public String getInfectious() {
+        return infectious;
+    }
+
+    public void setInfectious(String infectious) {
+        this.infectious = infectious;
+    }
+
+    public String getOperation() {
+        return operation;
+    }
+
+    public void setOperation(String operation) {
+        this.operation = operation;
+    }
+
+    public String getAllergy() {
+        return allergy;
+    }
+
+    public void setAllergy(String allergy) {
+        this.allergy = allergy;
+    }
+
+    public String getVaccination() {
+        return vaccination;
+    }
+
+    public void setVaccination(String vaccination) {
+        this.vaccination = vaccination;
+    }
+
+    public String getPersonal() {
+        return personal;
+    }
+
+    public void setPersonal(String personal) {
+        this.personal = personal;
+    }
+
+    public String getMarital() {
+        return marital;
+    }
+
+    public void setMarital(String marital) {
+        this.marital = marital;
+    }
+
+    public String getFamily() {
+        return family;
+    }
+
+    public void setFamily(String family) {
+        this.family = family;
+    }
+
+    public String getMenstrual() {
+        return menstrual;
+    }
+
+    public void setMenstrual(String menstrual) {
+        this.menstrual = menstrual;
+    }
+
+    public String getLisString() {
+        return lisString;
+    }
+
+    public void setLisString(String lisString) {
+        this.lisString = lisString;
+    }
+
+    public String getPacsString() {
+        return pacsString;
+    }
+
+    public void setPacsString(String pacsString) {
+        this.pacsString = pacsString;
+    }
+
+    public List<PreResult> getLis() {
+        return lis;
+    }
+
+    public void setLis(List<PreResult> lis) {
+        this.lis = lis;
+    }
+
+    public List<PreResult> getPacs() {
+        return pacs;
+    }
+
+    public void setPacs(List<PreResult> pacs) {
+        this.pacs = pacs;
+    }
+
+    public List<PreResult> getDiag() {
+        return diag;
+    }
+
+    public void setDiag(List<PreResult> diag) {
+        this.diag = diag;
+    }
+
+    public List<PreResult> getDrug() {
+        return drug;
+    }
+
+    public void setDrug(List<PreResult> drug) {
+        this.drug = drug;
+    }
+
+    public String getDiagString() {
+        return diagString;
+    }
+
+    public void setDiagString(String diagString) {
+        this.diagString = diagString;
+    }
+
+    public String getDrugString() {
+        return drugString;
+    }
+
+    public void setDrugString(String drugString) {
+        this.drugString = drugString;
+    }
+
+    public List<PreResult> getLisOrder() {
+        return lisOrder;
+    }
+
+    public void setLisOrder(List<PreResult> lisOrder) {
+        this.lisOrder = lisOrder;
+    }
+
+    public List<PreResult> getPacsOrder() {
+        return pacsOrder;
+    }
+
+    public void setPacsOrder(List<PreResult> pacsOrder) {
+        this.pacsOrder = pacsOrder;
+    }
+
+    public List<PreResult> getDiagOrder() {
+        return diagOrder;
+    }
+
+    public void setDiagOrder(List<PreResult> diagOrder) {
+        this.diagOrder = diagOrder;
+    }
+
+    public List<PreResult> getDrugOrder() {
+        return drugOrder;
+    }
+
+    public void setDrugOrder(List<PreResult> drugOrder) {
+        this.drugOrder = drugOrder;
+    }
+
+    public List<PreResult> getOperationOrder() {
+        return operationOrder;
+    }
+
+    public void setOperationOrder(List<PreResult> operationOrder) {
+        this.operationOrder = operationOrder;
+    }
+
+    public List<PreResult> getOtherOrder() {
+        return otherOrder;
+    }
+
+    public void setOtherOrder(List<PreResult> otherOrder) {
+        this.otherOrder = otherOrder;
+    }
+
+    public PreResult getDiseaseName() {
+        return diseaseName;
+    }
+
+    public void setDiseaseName(PreResult diseaseName) {
+        this.diseaseName = diseaseName;
+    }
+}

+ 1 - 1
common-service/src/main/java/org/diagbot/common/javabean/Detail.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.javabean;
+package org.diagbot.common.push.bean.neo4j;
 
 /**
  * 指标的详细信息

+ 1 - 1
common-service/src/main/java/org/diagbot/common/javabean/Drugs.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.javabean;
+package org.diagbot.common.push.bean.neo4j;
 
 import java.util.LinkedList;
 

+ 54 - 0
common-push/src/main/java/org/diagbot/common/push/bean/neo4j/Filnlly.java

@@ -0,0 +1,54 @@
+package org.diagbot.common.push.bean.neo4j;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Final treatment result bean (class name is presumably a misspelling of
+ * "Finally" — kept as-is for compatibility): adverse events plus concrete and
+ * rule-matched medication plans, for the primary disease and its complications.
+ */
+public class Filnlly {
+    private List<Indicators> adverseEvent;//adverse reactions
+    private List<Drugs> treatment;//concrete medications
+    private List<TreatCate> treatCate;//rule-matched medication categories (mono-drug, dual-drug, insulin)
+    private List<TreatCate> compli_cate;//rule-matched medication categories for complications
+    private List<Drugs> compli_treatment;//concrete medications for complications
+
+
+
+    public List<Indicators> getAdverseEvent() {
+        return adverseEvent;
+    }
+
+    public void setAdverseEvent(List<Indicators> adverseEvent) {
+        this.adverseEvent = adverseEvent;
+    }
+
+    public List<Drugs> getTreatment() {
+        return treatment;
+    }
+
+    public void setTreatment(List<Drugs> treatment) {
+        this.treatment = treatment;
+    }
+
+    public List<TreatCate> getTreatCate() {
+        return treatCate;
+    }
+
+    public void setTreatCate(List<TreatCate> treatCate) {
+        this.treatCate = treatCate;
+    }
+
+    public List<TreatCate> getCompli_cate() {
+        return compli_cate;
+    }
+
+    public void setCompli_cate(List<TreatCate> compli_cate) {
+        this.compli_cate = compli_cate;
+    }
+
+    public List<Drugs> getCompli_treatment() {
+        return compli_treatment;
+    }
+
+    public void setCompli_treatment(List<Drugs> compli_treatment) {
+        this.compli_treatment = compli_treatment;
+    }
+}

+ 1 - 1
common-service/src/main/java/org/diagbot/common/javabean/FuzhenFilnlly.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.javabean;
+package org.diagbot.common.push.bean.neo4j;
 
 import java.util.ArrayList;
 import java.util.Map;

+ 1 - 1
common-service/src/main/java/org/diagbot/common/javabean/Indicators.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.javabean;
+package org.diagbot.common.push.bean.neo4j;
 
 import java.util.List;
 

+ 1 - 1
common-service/src/main/java/org/diagbot/common/javabean/MangementEvaluation.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.javabean;
+package org.diagbot.common.push.bean.neo4j;
 
 import com.alibaba.fastjson.JSONObject;
 

+ 10 - 1
common-service/src/main/java/org/diagbot/common/javabean/MedicalIndication.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.javabean;
+package org.diagbot.common.push.bean.neo4j;
 
 import java.util.List;
 
@@ -9,6 +9,7 @@ public class MedicalIndication {
     private String name;
     // 触发推送的规则
     private String rule;
+    private String hisName;
     private List<MedicalIndicationDetail> details;
 
     public String getName() {
@@ -19,6 +20,14 @@ public class MedicalIndication {
         this.name = name;
     }
 
+    public String getHisName() {
+        return hisName;
+    }
+
+    public void setHisName(String hisName) {
+        this.hisName = hisName;
+    }
+
     public String getRule() {
         return rule;
     }

+ 1 - 1
common-service/src/main/java/org/diagbot/common/javabean/MedicalIndicationDetail.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.javabean;
+package org.diagbot.common.push.bean.neo4j;
 
 import com.alibaba.fastjson.JSONObject;
 

+ 1 - 1
common-service/src/main/java/org/diagbot/common/javabean/Medicition.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.javabean;
+package org.diagbot.common.push.bean.neo4j;
 
 /**
  * 药

+ 24 - 0
common-push/src/main/java/org/diagbot/common/push/bean/neo4j/MeditionDetail.java

@@ -0,0 +1,24 @@
+package org.diagbot.common.push.bean.neo4j;
+
+import java.util.List;
+
+/**
+ * One medication option inside a treatment plan (class name is presumably a
+ * misspelling of "MedicationDetail" — kept as-is for compatibility).
+ */
+public class MeditionDetail {
+    private String description;//e.g. "dual therapy: biguanide + SGLT-2"
+    private List<Drugs> treatment;//concrete medications
+
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    public List<Drugs> getTreatment() {
+        return treatment;
+    }
+
+    public void setTreatment(List<Drugs> treatment) {
+        this.treatment = treatment;
+    }
+}

+ 27 - 0
common-push/src/main/java/org/diagbot/common/push/bean/neo4j/Treat.java

@@ -0,0 +1,27 @@
+package org.diagbot.common.push.bean.neo4j;
+
+import java.util.List;
+
+/**
+ * Treatment response: adverse events plus the recommended treatment plans.
+ */
+public class Treat {
+    private List<Indicators> adverseEvent;//adverse reactions
+    private List<TreatDetail> treatmentPlan;//treatment plans
+
+    public List<Indicators> getAdverseEvent() {
+        return adverseEvent;
+    }
+
+    public void setAdverseEvent(List<Indicators> adverseEvent) {
+        this.adverseEvent = adverseEvent;
+    }
+
+    public List<TreatDetail> getTreatmentPlan() {
+        return treatmentPlan;
+    }
+
+    public void setTreatmentPlan(List<TreatDetail> treatmentPlan) {
+        this.treatmentPlan = treatmentPlan;
+    }
+}

+ 24 - 0
common-push/src/main/java/org/diagbot/common/push/bean/neo4j/TreatCate.java

@@ -0,0 +1,24 @@
+package org.diagbot.common.push.bean.neo4j;
+
+import java.util.List;
+
+/**
+ * A medication category matched by the treatment rules.
+ */
+public class TreatCate {
+    private String type;//mono-drug, dual-drug, or insulin therapy
+    private List<String> name;//concrete drug-class combinations
+
+    public String getType() {
+        return type;
+    }
+
+    public void setType(String type) {
+        this.type = type;
+    }
+
+    public List<String> getName() {
+        return name;
+    }
+
+    public void setName(List<String> name) {
+        this.name = name;
+    }
+}

+ 24 - 0
common-push/src/main/java/org/diagbot/common/push/bean/neo4j/TreatDetail.java

@@ -0,0 +1,24 @@
+package org.diagbot.common.push.bean.neo4j;
+
+import java.util.List;
+
+/**
+ * One titled section of a treatment plan with its medication options.
+ */
+public class TreatDetail {
+    private String title;//e.g. "recommended diabetes medication"
+    private List<MeditionDetail> meditionDetails;//medication details (original comment "集体治疗" is likely a typo for "具体治疗", i.e. concrete treatment)
+
+    public String getTitle() {
+        return title;
+    }
+
+    public void setTitle(String title) {
+        this.title = title;
+    }
+
+    public List<MeditionDetail> getMeditionDetails() {
+        return meditionDetails;
+    }
+
+    public void setMeditionDetails(List<MeditionDetail> meditionDetails) {
+        this.meditionDetails = meditionDetails;
+    }
+}

+ 396 - 0
common-push/src/main/java/org/diagbot/common/push/cache/ApplicationCacheUtil.java

@@ -0,0 +1,396 @@
+package org.diagbot.common.push.cache;
+
+import org.diagbot.common.push.bean.RelevantFeature;
+import org.diagbot.common.push.bean.ResultMappingFilter;
+import org.diagbot.common.push.bean.Rule;
+import org.diagbot.common.push.bean.RuleApp;
+import org.diagbot.nlp.participle.cfg.Configuration;
+import org.diagbot.nlp.participle.cfg.DefaultConfig;
+import org.diagbot.nlp.util.NlpCache;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Process-wide caches of dictionary-file content used by the push services.
+ * All caches are static, mutable and lazily initialized WITHOUT synchronization:
+ * concurrent first calls may load a dictionary more than once (last write wins).
+ */
+public class ApplicationCacheUtil {
+
+    //synonym definitions from the term dictionary
+    public static Map<String, Map<String, String>> standard_info_synonym_map = null;
+    //diagnosis -> department mapping (original comment "诊断科室衍射"; "衍射" is likely a typo for "映射"/mapping)
+    public static Map<String, String> doc_result_mapping_diag_map = null;
+    //feature sex/age filters
+    public static Map<String, Map<String, ResultMappingFilter>> doc_result_mapping_filter_map = null;
+    //sex/age filters for labs, auxiliary exams (PACS) and vital signs
+    public static Map<String, Map<String, ResultMappingFilter>> doc_result_mapping_lpvSex_filter_map = null;
+    // rules, keyed by public name
+    public static Map<String, List<Rule>> kl_rule_filter_map = null;
+    //critical value reminders, keyed by rule-application id
+    public static Map<String, RuleApp> kl_rule_app_filter_map = null;
+    //PACS relation-extraction filter
+    public static Map<String, Map<String, String>> kl_diagnose_detail_filter_map = null;
+    //naive Bayes probabilities
+    public static Map<String, Map<String, Float>> doc_feature_naivebayes_prob_map = null;
+    //naive Bayes rule filter
+    public static Map<String, Map<String, Float>> relevant_feature_bayes_map = null;
+    //vital-sign filter comparison table
+    public static Map<String, RelevantFeature> relevant_feature_map = null;
+
+    /** Lazily loads and returns the synonym map from NlpCache (unsynchronized lazy init). */
+    public static Map<String, Map<String, String>> getStandard_info_synonym_map() {
+        if (standard_info_synonym_map == null) {
+            standard_info_synonym_map = NlpCache.getStandard_info_synonym_map();
+        }
+        return standard_info_synonym_map;
+    }
+
+    /** Lazily builds and returns the diagnosis -> department map (unsynchronized lazy init). */
+    public static Map<String, String> getDoc_result_mapping_diag_map() {
+        if (doc_result_mapping_diag_map == null) {
+            createDoc_result_mapping_diag_map();
+        }
+        return doc_result_mapping_diag_map;
+    }
+
+    /** (Re)loads the diagnosis -> department mapping from bigdata_diag_2_dept.dict. */
+    public static Map<String, String> createDoc_result_mapping_diag_map() {
+        Configuration configuration = new DefaultConfig();
+        doc_result_mapping_diag_map = configuration.loadMapDict("bigdata_diag_2_dept.dict");
+        return doc_result_mapping_diag_map;
+    }
+
+    /** Lazily builds and returns the lab/PACS/vital sex-age filter map (unsynchronized lazy init). */
+    public static Map<String, Map<String, ResultMappingFilter>> getDoc_result_mapping_lpvSex_filter_map() {
+        if (doc_result_mapping_lpvSex_filter_map == null) {
+            createDoc_result_mapping_lpvSex_filter_map();
+        }
+        return doc_result_mapping_lpvSex_filter_map;
+    }
+
+    /** Lazily builds and returns the feature sex-age filter map (unsynchronized lazy init). */
+    public static Map<String, Map<String, ResultMappingFilter>> getDoc_result_mapping_filter_map() {
+        if (doc_result_mapping_filter_map == null) {
+            createDoc_result_mapping_filter_map();
+        }
+        return doc_result_mapping_filter_map;
+    }
+
+    /**
+     * Parses bigdata_sex_age_filter.dict (5 "|"-separated fields per line:
+     * featureName|featureType|sex|ageStart|ageEnd) into a nested map:
+     * featureType -> featureName -> filter. Malformed lines (wrong field count)
+     * are silently skipped; a NumberFormatException aborts the parse loop and is
+     * only printed, leaving a partially filled map.
+     * NOTE(review): near-duplicate of createDoc_result_mapping_lpvSex_filter_map
+     * below — only the dict file and target field differ; candidate for a shared helper.
+     */
+    public static Map<String, Map<String, ResultMappingFilter>> createDoc_result_mapping_filter_map() {
+        Configuration configuration = new DefaultConfig();
+        List<String> fileContents = configuration.readFileContents("bigdata_sex_age_filter.dict");
+        String[] line_string;
+        List<ResultMappingFilter> resultMappingFilters = new ArrayList<>();
+        try {
+            for (int i = 0; i < fileContents.size(); i++) {
+                line_string = org.apache.commons.lang3.StringUtils.split(fileContents.get(i), "\\|");
+                if (line_string.length == 5) {
+                    ResultMappingFilter resultMappingFilter = new ResultMappingFilter();
+                    resultMappingFilter.setFeatureName(line_string[0]);
+                    resultMappingFilter.setFeatureType(line_string[1]);
+                    resultMappingFilter.setSex(line_string[2]);
+                    resultMappingFilter.setAgeStart(Integer.parseInt(line_string[3]));
+                    resultMappingFilter.setAgeEnd(Integer.parseInt(line_string[4]));
+                    resultMappingFilters.add(resultMappingFilter);
+                }
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // group the parsed filters: featureType -> (featureName -> filter)
+        doc_result_mapping_filter_map = new HashMap<>();
+        Map<String, ResultMappingFilter> filterMap = null;
+        for (ResultMappingFilter resultMappingFilter : resultMappingFilters) {
+            filterMap = doc_result_mapping_filter_map.get(resultMappingFilter.getFeatureType());
+            if (filterMap == null) {
+                filterMap = new HashMap<>();
+            }
+            filterMap.put(resultMappingFilter.getFeatureName(), resultMappingFilter);
+            doc_result_mapping_filter_map.put(resultMappingFilter.getFeatureType(), filterMap);
+        }
+        return doc_result_mapping_filter_map;
+    }
+
+    /**
+     * Sex/age filters for labs, auxiliary exams (PACS) and vital signs, parsed from
+     * bigdata_lpv_sex_age_filter.dict (featureName|featureType|sex|ageStart|ageEnd).
+     * Same parsing/grouping scheme (and same error handling caveats) as
+     * createDoc_result_mapping_filter_map above.
+     *
+     * @return featureType -> featureName -> filter
+     */
+    public static Map<String, Map<String, ResultMappingFilter>> createDoc_result_mapping_lpvSex_filter_map() {
+        Configuration configuration = new DefaultConfig();
+        List<String> fileContents = configuration.readFileContents("bigdata_lpv_sex_age_filter.dict");
+        String[] line_string;
+        List<ResultMappingFilter> resultMappingFilters = new ArrayList<>();
+        try {
+            for (int i = 0; i < fileContents.size(); i++) {
+                line_string = org.apache.commons.lang3.StringUtils.split(fileContents.get(i), "\\|");
+                if (line_string.length == 5) {
+                    ResultMappingFilter resultMappingFilter = new ResultMappingFilter();
+                    resultMappingFilter.setFeatureName(line_string[0]);
+                    resultMappingFilter.setFeatureType(line_string[1]);
+                    resultMappingFilter.setSex(line_string[2]);
+                    resultMappingFilter.setAgeStart(Integer.parseInt(line_string[3]));
+                    resultMappingFilter.setAgeEnd(Integer.parseInt(line_string[4]));
+                    resultMappingFilters.add(resultMappingFilter);
+                }
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // group the parsed filters: featureType -> (featureName -> filter)
+        doc_result_mapping_lpvSex_filter_map = new HashMap<>();
+        Map<String, ResultMappingFilter> filterMap = null;
+        for (ResultMappingFilter resultMappingFilter : resultMappingFilters) {
+            filterMap = doc_result_mapping_lpvSex_filter_map.get(resultMappingFilter.getFeatureType());
+            if (filterMap == null) {
+                filterMap = new HashMap<>();
+            }
+            filterMap.put(resultMappingFilter.getFeatureName(), resultMappingFilter);
+            doc_result_mapping_lpvSex_filter_map.put(resultMappingFilter.getFeatureType(), filterMap);
+        }
+        return doc_result_mapping_lpvSex_filter_map;
+    }
+
+
+    /** Lazily builds and returns the rule map keyed by public name (unsynchronized lazy init). */
+    public static Map<String, List<Rule>> getKl_rule_filter_map() {
+        if (kl_rule_filter_map == null) {
+            create_kl_rule_filter_map();
+        }
+        return kl_rule_filter_map;
+    }
+
+    /**
+     * Parses bigdata_rule_filter.dict (13 "|"-separated fields per line:
+     * id|pub_name|min_operator|min_value|min_unit|max_operator|max_value|max_unit|
+     * eq_operator|eq_value|eq_unit|remind|suffixInfo) into Rule beans grouped by
+     * pub_name. Lines with a different field count are silently skipped.
+     */
+    public static void create_kl_rule_filter_map() {
+        kl_rule_filter_map = new HashMap<>();
+
+        Configuration configuration = new DefaultConfig();
+        List<String> fileContents = configuration.readFileContents("bigdata_rule_filter.dict");
+
+        List<Rule> rules = null;
+        for (String line : fileContents) {
+            // limit -1 keeps trailing empty fields so the length check stays correct
+            String[] content = line.split("\\|", -1);
+            Rule rule = new Rule();
+            if (content.length == 13) {
+                // NOTE: String.split never yields null elements, so the null guards
+                // below are dead-but-harmless defensive code.
+                rule.setId(content[0] == null ? "" : content[0]);
+                rule.setPub_name(content[1] == null ? "" : content[1]);
+                rule.setMin_operator(content[2] == null ? "" : content[2]);
+                rule.setMin_value(content[3] == null ? "" : content[3]);
+                rule.setMin_unit(content[4] == null ? "" : content[4]);
+                rule.setMax_operator(content[5] == null ? "" : content[5]);
+                rule.setMax_value(content[6] == null ? "" : content[6]);
+                rule.setMax_unit(content[7] == null ? "" : content[7]);
+                rule.setEq_operator(content[8] == null ? "" : content[8]);
+                rule.setEq_value(content[9] == null ? "" : content[9]);
+                rule.setEq_unit(content[10] == null ? "" : content[10]);
+                rule.setRemind(content[11] == null ? "" : content[11]);
+                rule.setSuffixInfo(content[12] == null ? "" : content[12]);
+                if (kl_rule_filter_map.get(rule.getPub_name()) == null) {
+                    rules = new ArrayList<>();
+                } else {
+                    rules = kl_rule_filter_map.get(rule.getPub_name());
+                }
+                rules.add(rule);
+                kl_rule_filter_map.put(rule.getPub_name(), rules);
+            }
+        }
+    }
+
+    /** Lazily builds and returns the rule-application map keyed by id (unsynchronized lazy init). */
+    public static Map<String, RuleApp> getKl_rule_app_filter_map() {
+        if (kl_rule_app_filter_map == null) {
+            create_kl_rule_app_filter_map();
+        }
+        return kl_rule_app_filter_map;
+    }
+
+    /**
+     * Parses bigdata_rule_app_filter.dict (4 "|"-separated fields per line:
+     * id|ruleIds|typeId|remind) into RuleApp beans keyed by id. Lines with a
+     * different field count are silently skipped.
+     */
+    public static void create_kl_rule_app_filter_map() {
+        kl_rule_app_filter_map = new HashMap<>();
+
+        Configuration configuration = new DefaultConfig();
+        List<String> fileContents = configuration.readFileContents("bigdata_rule_app_filter.dict");
+
+        for (String line : fileContents) {
+            // limit -1 keeps trailing empty fields so the length check stays correct
+            String[] content = line.split("\\|", -1);
+            RuleApp ruleApp = new RuleApp();
+            if (content.length == 4) {
+                // String.split never yields null elements; guards are defensive only
+                ruleApp.setId(content[0] == null ? "" : content[0]);
+                ruleApp.setRuleIds(content[1] == null ? "" : content[1]);
+                ruleApp.setTypeId(content[2] == null ? "" : content[2]);
+                ruleApp.setRemind(content[3] == null ? "" : content[3]);
+                kl_rule_app_filter_map.put(ruleApp.getId(), ruleApp);
+            }
+        }
+    }
+
+    /** Lazily builds and returns the diagnose-detail relation filter map (unsynchronized lazy init). */
+    public static Map<String, Map<String, String>> getKl_diagnose_detail_filter_map() {
+        if (kl_diagnose_detail_filter_map == null) {
+            create_kl_diagnose_detail_filter_map();
+        }
+        return kl_diagnose_detail_filter_map;
+    }
+
+
+    /**
+     * Parses bigdata_diagnose_detail_filter.dict ("key|rel1、rel2…") into
+     * key -> {relation -> relation}. A relation already seen under ANY key is
+     * skipped (global de-duplication, preserved from the original code).
+     *
+     * Fix: the original allocated ONE HashMap before the loop and reused that same
+     * instance for every key whose entry was still missing, so all such keys ended
+     * up sharing (and polluting) a single relation map; a fresh map is now
+     * allocated per key.
+     */
+    public static void create_kl_diagnose_detail_filter_map() {
+        kl_diagnose_detail_filter_map = new HashMap<>();
+        Set<String> diagnoseDetailRelation = new HashSet<>();
+        Configuration configuration = new DefaultConfig();
+        List<String> fileContents = configuration.readFileContents("bigdata_diagnose_detail_filter.dict");
+        for (String line : fileContents) {
+            String[] content = line.split("\\|", -1);
+            String[] relations = content[1].split("、");
+            for (String relation : relations) {
+                if (diagnoseDetailRelation.add(relation)) {
+                    Map<String, String> relationMap = kl_diagnose_detail_filter_map.get(content[0]);
+                    if (relationMap == null) {
+                        relationMap = new HashMap<>();
+                        kl_diagnose_detail_filter_map.put(content[0], relationMap);
+                    }
+                    relationMap.put(relation, relation);
+                }
+            }
+        }
+    }
+
+    /** Lazily builds and returns the naive Bayes probability map (unsynchronized lazy init). */
+    public static Map<String, Map<String, Float>> getDoc_feature_naivebayes_prob_map() {
+        if (doc_feature_naivebayes_prob_map == null) {
+            create_doc_feature_naivebayes_prob_map();
+        }
+        return doc_feature_naivebayes_prob_map;
+    }
+
+    public static void create_doc_feature_naivebayes_prob_map() {
+        doc_feature_naivebayes_prob_map = new HashMap<>();
+        //<rdn,[feature...]> 存储每个rdn对应的特征List
+        Map<String, List<String>> featureMap = new HashMap<>();
+        List<String> featureList = null;
+        Configuration configuration = new DefaultConfig();
+        List<String> fileFeatureContents = configuration.readFileContents("bigdata_naivebayes_features.dict");
+        for (String line : fileFeatureContents) {
+            String[] content = line.split("\\|", -1);
+            if (featureMap.get(content[0]) == null) {
+                featureList = new ArrayList<>();
+                for (String feature : content[1].split(" ")) {
+                    featureList.add(feature);
+                }
+                featureMap.put(content[0], featureList);
+            }
+        }
+
+        //<rdn,diagnose> 存每个rdn对应疾病
+        Map<String, String> diagnoseMap = new HashMap<>();
+        //<diagnose,count> 存每个疾病的数量
+        Map<String, Integer> diagnoseCount = new HashMap<>();
+        List<String> fileDiagnoseContents = configuration.readFileContents("bigdata_naivebayes_diagnose.dict");
+        diagnoseCount.put("diagnoseCount", fileDiagnoseContents.size());
+        for (String line : fileDiagnoseContents) {
+            String[] content = line.split("\\|", -1);
+            if (diagnoseMap.get(content[0]) == null) {
+                diagnoseMap.put(content[0], content[1]);
+            }
+            if (diagnoseCount.get(content[1]) == null) {
+                diagnoseCount.put(content[1], 1);
+            } else {
+                diagnoseCount.put(content[1], diagnoseCount.get(content[1]) + 1);
+            }
+        }
+
+        Map<String, Map<String, Integer>> diagnose2featureCount = new HashMap<>();
+        Map<String, Integer> featureCount = new HashMap<>();
+        for (Map.Entry<String, String> diagnoseMapEntry : diagnoseMap.entrySet()) {
+            //featureMap -> <1000000_144 , [咳嗽,咳痰,1周,气管炎]>
+            if (featureMap.get(diagnoseMapEntry.getKey()) == null) {
+                continue;
+            }
+            for (String feature : featureMap.get(diagnoseMapEntry.getKey())) {
+                /**
+                 diagnoseMapEntry <1596386_9,鼻炎> -> <rdn,diagnose>
+                 如果疾病对应特征列表为空 diagnoseMapEntry.getValue()->疾病
+                 */
+                if (diagnose2featureCount.get(diagnoseMapEntry.getValue()) == null) {
+                    featureCount = new HashMap<>();
+                    //featureMap -> <1000000_144 , [咳嗽,咳痰,1周,气管炎]>
+                    if (featureCount.get(feature) == null) {
+                        featureCount.put(feature, 1);
+                    } else {
+                        featureCount.put(feature, featureCount.get(feature) + 1);
+                    }
+                    //疾病对应病历数
+                    featureCount.put("diagnoseCount", diagnoseCount.get(diagnoseMapEntry.getValue()));
+                    diagnose2featureCount.put(diagnoseMapEntry.getValue(), featureCount);
+                } else {
+                    if (diagnose2featureCount.get(diagnoseMapEntry.getValue()).get(feature) == null) {
+                        diagnose2featureCount.get(diagnoseMapEntry.getValue()).put(feature, 1);
+                    } else {
+                        diagnose2featureCount.get(diagnoseMapEntry.getValue())
+                                .put(feature, diagnose2featureCount.get(diagnoseMapEntry.getValue()).get(feature) + 1);
+                    }
+                }
+            }
+        }
+
+        Map<String, Float> prob = null;
+        for (Map.Entry<String, Map<String, Integer>> diagnose2featureCountEntry : diagnose2featureCount.entrySet()) {
+            prob = new HashMap<>();
+            //计算先验概率
+            float priorProb = (float) diagnose2featureCountEntry.getValue().get("diagnoseCount") / diagnoseCount.get("diagnoseCount");
+            prob.put("priorProb", priorProb);
+            //计算条件概率
+            for (Map.Entry<String, Integer> featuresCount : diagnose2featureCountEntry.getValue().entrySet()) {
+                float conditionProb = (float) featuresCount.getValue() / diagnose2featureCountEntry.getValue().get("diagnoseCount");
+                prob.put(featuresCount.getKey(), conditionProb);
+            }
+            doc_feature_naivebayes_prob_map.put(diagnose2featureCountEntry.getKey(), prob);
+        }
+    }
+
+    public static Map<String, Map<String,Float>> getRelevant_feature_map() {
+        if (relevant_feature_bayes_map == null) {
+            createRelevant_feature_map();
+        }
+        return relevant_feature_bayes_map;
+    }
+
+    public static Map<String, Map<String,Float>> createRelevant_feature_map() {
+        relevant_feature_bayes_map = new HashMap<>();
+        Map<String,Float> relevantFeatureProb = null;
+        Configuration configuration = new DefaultConfig();
+        List<String> relevantFeatureList = configuration.readFileContents("bigdata_relevant_feature.dict");
+        for (String relevantFeature:relevantFeatureList) {
+            String[] content = relevantFeature.split("\\|", -1);
+            if (relevant_feature_bayes_map.get(content[0]) == null){
+                relevantFeatureProb = new HashMap<>();
+                relevantFeatureProb.put(content[1],0.00f);
+                relevant_feature_bayes_map.put(content[0],relevantFeatureProb);
+            } else {
+                relevant_feature_bayes_map.get(content[0]).put(content[1],0.00f);
+            }
+        }
+        return relevant_feature_bayes_map;
+    }
+
+    /**
+     * 体征过滤获取对比表信息
+     *
+     * @return
+     */
+    public static Map<String, RelevantFeature> get_relevant_feature() {
+        if (relevant_feature_map == null) {
+            create_get_relevant_feature();
+        }
+        return relevant_feature_map;
+    }
+
+    public static void create_get_relevant_feature(){
+        relevant_feature_map = new HashMap<>();
+        Configuration configuration = new DefaultConfig();
+        List<String> fileContents = configuration.readFileContents("bigdata_diagnose_feature_filter.dict");
+        for (String line : fileContents) {
+            String[] content = line.split("\\|", -1);
+            RelevantFeature relevantFeature = new RelevantFeature();
+            if (content.length == 6) {
+                relevantFeature.setId(content[0] == null ? "" : content[0]);
+                relevantFeature.setDiagnose(content[1] == null ? "" : content[1]);
+                relevantFeature.setFeature(content[2] == null ? "" : content[2]);
+                relevantFeature.setFeature_type(content[3] == null ? "" : content[3]);
+                relevantFeature.setFind_suspect_diagnose(content[4] == null ? "" : content[4]);
+                relevantFeature.setValue_type(content[5] == null ? "" : content[5]);
+                relevant_feature_map.put(relevantFeature.getDiagnose(),relevantFeature);
+            }
+        }
+    }
+}

+ 188 - 60
common-push/src/main/java/org/diagbot/common/push/cache/CacheFileManager.java

@@ -1,5 +1,6 @@
 package org.diagbot.common.push.cache;
 
+import org.diagbot.common.push.util.CryptUtil;
 import org.diagbot.pub.jdbc.MysqlJdbc;
 import org.diagbot.pub.utils.PropertiesUtil;
 import org.diagbot.pub.utils.security.EncrypDES;
@@ -73,7 +74,7 @@ public class CacheFileManager {
         try {
             EncrypDES encrypDES = new EncrypDES();
             //所有词典库 不能用concat_group 大小写不区分
-            String sql = "select l_1.name l_1_name, l_1.type_id type_id, l_2.name l_2_name, l_1.concept_id from kl_library_info l_1\n" +
+            String sql = "select l_1.name l_1_name, l_1.type_id type_id, l_2.name l_2_name, kc.lib_name from kl_library_info l_1\n" +
                     "                    left join kl_library_info l_2 on l_1.concept_id = l_2.concept_id and l_2.is_concept = 1\n" +
                     "left join kl_concept kc on l_1.concept_id = kc.id\n" +
                     "where kc.is_deleted = 'N' ";
@@ -87,9 +88,11 @@ public class CacheFileManager {
             String r2;
             String r3;
             while (rs.next()) {
-                r1 = rs.getString(1);
+//                r1 = rs.getString(1);
+                r1 = CryptUtil.decrypt_char(rs.getString(1));
                 r2 = rs.getString(2);
-                r3 = rs.getString(4);
+                r3 = CryptUtil.decrypt_char(rs.getString(4));
+//                r3 = rs.getString(4);
                 if (idMap.get(r1) == null) {
                     idMap.put(r1, r2);
                     nameMap.put(r1, r3);
@@ -121,8 +124,10 @@ public class CacheFileManager {
 
             fw = new FileWriter(path + "synonym.dict");
             while (rs.next()) {
-                r1 = rs.getString(1);
-                r2 = rs.getString(2);
+//                r1 = rs.getString(1);
+//                r2 = rs.getString(2);
+                r1 = CryptUtil.decrypt_char(rs.getString(1));
+                r2 = CryptUtil.decrypt_char(StringUtils.isEmpty(rs.getString(2))?"":rs.getString(2));
                 r3 = rs.getString(3);
                 fw.write(encrypDES.encrytor(r1 + "|" + r2 + "|" + r3));
                 fw.write("\n");
@@ -164,6 +169,18 @@ public class CacheFileManager {
 
             fw = new FileWriter(path + "classify.dict");
             fw.close();
+
+            sql = "select name, type from kl_library_info_pacs order by name";
+            st = conn.createStatement();
+            rs = st.executeQuery(sql);
+            fw = new FileWriter(path + "pacs-tc.dict");
+            while (rs.next()) {
+                r1 = rs.getString(1);
+                r2 = rs.getString(2);
+                fw.write(encrypDES.encrytor(r1 + "|9|"+ r2 + "|" + r1));
+                fw.write("\n");
+            }
+            fw.close();
         } catch (IOException ioe) {
             ioe.printStackTrace();
         } catch (SQLException sqle) {
@@ -191,11 +208,12 @@ public class CacheFileManager {
             st = conn.createStatement();
             rs = st.executeQuery(sql);
             FileWriter fw = new FileWriter(path + "graph_diag_classify.dict");
-            String  r2 ,r3;
+            String r2, r3;
             while (rs.next()) {
-                r2 = rs.getString(1);//疾病名称
+//                r2 = rs.getString(1);//疾病名称
+                r2 = CryptUtil.decrypt_char(rs.getString(1));//疾病名称
                 r3 = rs.getString(2);//疾病类别
-                fw.write(encrypDES.encrytor(r2+ "|" + r3));
+                fw.write(encrypDES.encrytor(r2 + "|" + r3));
                 fw.write("\n");
             }
             fw.close();
@@ -209,11 +227,12 @@ public class CacheFileManager {
             fw = new FileWriter(path + "graph_sex_age_filter.dict");
             String r1, r4, r5;
             while (rs.next()) {
-                r1 = rs.getString(1);//术语名称
+//                r1 = rs.getString(1);//术语名称
+                r1 = CryptUtil.decrypt_char(rs.getString(1));//术语名称
                 r2 = rs.getString(3);//sexType 1:男 2:女 3:都可以
                 r3 = rs.getString(4);//min_age
                 r4 = rs.getString(5);//max_age
-                fw.write(encrypDES.encrytor(r1 + "|" + r2 + "|" + r3+ "|" + r4));
+                fw.write(encrypDES.encrytor(r1 + "|" + r2 + "|" + r3 + "|" + r4));
                 fw.write("\n");
             }
             fw.close();
@@ -230,14 +249,16 @@ public class CacheFileManager {
             rs = st.executeQuery(sql);
             fw = new FileWriter(path + "graph_vital_convert.dict");
             while (rs.next()) {
-                r1 = rs.getString(1);//体征结果
-                r2 = rs.getString(2);//体征指标
+//                r1 = rs.getString(1);//体征结果
+//                r2 = rs.getString(2);//体征指标
+                r1 = CryptUtil.decrypt_char(rs.getString(1));//体征结果
+                r2 = CryptUtil.decrypt_char(rs.getString(2));//体征指标
                 fw.write(encrypDES.encrytor(r1 + "|" + r2));
                 fw.write("\n");
             }
             fw.close();
             //疾病科室信息
-            sql = "SELECT k1.lib_name diag_name, k2.lib_name dept_name FROM kl_concept_common kcc, kl_concept k1, kl_concept k2 " +
+            sql = "SELECT k1.lib_name diag_name, k2.lib_name dept_name FROM kl_disease kcc, kl_concept k1, kl_concept k2 " +
                     "where kcc.concept_id = k1.id and kcc.dept_id = k2.id " +
                     "and k1.lib_type = 18 and kcc.dept_id  is not null";
             st = conn.createStatement();
@@ -258,7 +279,7 @@ public class CacheFileManager {
                 r1 = rs.getString(1);
                 r2 = rs.getString(2);
                 r3 = rs.getString(3);
-                fw.write(encrypDES.encrytor(r1+ "|" + r2+ "|" + r3));
+                fw.write(encrypDES.encrytor(r1 + "|" + r2 + "|" + r3));
                 fw.write("\n");
             }
             fw.close();
@@ -281,7 +302,7 @@ public class CacheFileManager {
         try {
             EncrypDES encrypDES = new EncrypDES();
             //疾病科室
-            String sql = "SELECT k1.lib_name diag_name, k2.lib_name dept_name FROM kl_concept_common kcc, kl_concept k1, kl_concept k2 " +
+            String sql = "SELECT k1.lib_name diag_name, k2.lib_name dept_name FROM kl_disease kcc, kl_concept k1, kl_concept k2 " +
                     "where kcc.concept_id = k1.id and kcc.dept_id = k2.id " +
                     "and k1.lib_type = 18 and kcc.dept_id  is not null";
             st = conn.createStatement();
@@ -306,7 +327,8 @@ public class CacheFileManager {
 
             String r1, r2, r3, r4, r5;
             while (rs.next()) {
-                r1 = rs.getString(1);
+//                r1 = rs.getString(1);
+                r1 = CryptUtil.decrypt_char(rs.getString(1));
                 r2 = rs.getString(2);
                 r3 = rs.getString(3);
                 r4 = rs.getString(4);
@@ -325,20 +347,20 @@ public class CacheFileManager {
 
             fw = new FileWriter(path + "bigdata_value_analyze.dict");
             while (rs.next()) {
-                fw.write(encrypDES.encrytor(rs.getString(1)));
+                fw.write(encrypDES.encrytor(CryptUtil.decrypt_char(rs.getString(1))));
                 fw.write("\n");
             }
             fw.close();
 
             //规则过滤信息
-            sql = "SELECT set_name, idx_name, min_operator, min_value, min_unit, max_operator, " +
-                    " max_value, max_unit, eq_operator, eq_value, eq_unit, remind FROM kl_rule ";
+            sql = "SELECT id, pub_name, min_operator, min_value, min_unit, max_operator, max_value, " +
+                    "max_unit, eq_operator, eq_value, eq_unit, remind,suffix_info FROM kl_rule_pub";
             st = conn.createStatement();
             rs = st.executeQuery(sql);
-            fw = new FileWriter(path + "rule_filter.dict");
-            String r6, r7, r8, r9, r10, r11, r12;
+            fw = new FileWriter(path + "bigdata_rule_filter.dict");
+            String r6, r7, r8, r9, r10, r11, r12,r13;
             while (rs.next()) {
-                r1 = rs.getString(1);
+                r1 = String.valueOf(rs.getInt(1));
                 r2 = rs.getString(2);
                 r3 = rs.getString(3);
                 r4 = rs.getString(4);
@@ -350,63 +372,169 @@ public class CacheFileManager {
                 r10 = rs.getString(10);
                 r11 = rs.getString(11);
                 r12 = rs.getString(12);
-                fw.write(encrypDES.encrytor(r1+ "|" + r2 + "|" + r3 + "|" + r4 + "|" + r5
-                        + "|" + r6 + "|" + r7 + "|" + r8 + "|" + r9 + "|" + r10 + "|" + r11 + "|" + r12));
+                r13 = rs.getString(13);
+                r1 = StringUtils.isEmpty(r1) ? "" : r1;
+                r2 = StringUtils.isEmpty(r2) ? "" : r2;
+                r3 = StringUtils.isEmpty(r3) ? "" : r3;
+                r4 = StringUtils.isEmpty(r4) ? "" : r4;
+                r5 = StringUtils.isEmpty(r5) ? "" : r5;
+                r6 = StringUtils.isEmpty(r6) ? "" : r6;
+                r7 = StringUtils.isEmpty(r7) ? "" : r7;
+                r8 = StringUtils.isEmpty(r8) ? "" : r8;
+                r9 = StringUtils.isEmpty(r9) ? "" : r9;
+                r10 = StringUtils.isEmpty(r10) ? "" : r10;
+                r11 = StringUtils.isEmpty(r11) ? "" : r11;
+                r12 = StringUtils.isEmpty(r12) ? "" : r12;
+                r13 = StringUtils.isEmpty(r13) ? "" : r13;
+                fw.write(encrypDES.encrytor(r1 + "|" + r2 + "|" + r3 + "|" + r4 + "|" + r5
+                        + "|" + r6 + "|" + r7 + "|" + r8 + "|" + r9 + "|" + r10 + "|" + r11
+                        + "|" + r12 + "|" + r13));
+                fw.write("\n");
+            }
+            fw.close();
+
+            sql = "SELECT id, rule_id, type_id, remind FROM kl_rule_app";
+            st = conn.createStatement();
+            rs = st.executeQuery(sql);
+            fw = new FileWriter(path + "bigdata_rule_app_filter.dict");
+            while (rs.next()) {
+                r1 = String.valueOf(rs.getInt(1));
+                r2 = rs.getString(2);
+                r3 = rs.getString(3);
+                r4 = rs.getString(4);
+                r1 = StringUtils.isEmpty(r1) ? "" : r1;
+                r2 = StringUtils.isEmpty(r2) ? "" : r2;
+                r3 = StringUtils.isEmpty(r3) ? "" : r3;
+                r4 = StringUtils.isEmpty(r4) ? "" : r4;
+                fw.write(encrypDES.encrytor(r1 + "|" + r2 + "|" + r3 + "|" + r4));
+                fw.write("\n");
+            }
+            fw.close();
+
+            sql = "SELECT type,relation FROM `kl_diagnose_detail` WHERE type = 4 AND LENGTH(relation) > 0 GROUP BY relation";
+            st = conn.createStatement();
+            rs = st.executeQuery(sql);
+            fw = new FileWriter(path + "bigdata_diagnose_detail_filter.dict");
+            while (rs.next()) {
+                r1 = String.valueOf(rs.getInt(1));
+                r2 = rs.getString(2);
+                fw.write(encrypDES.encrytor(r1+ "|" + r2));
+                fw.write("\n");
+            }
+            fw.close();
+
+            sql = "SELECT rdn, GROUP_CONCAT(feature_name ORDER BY sn SEPARATOR ' ') AS features FROM doc_feature WHERE feature_type = 9 GROUP BY rdn;";
+            st = conn.createStatement();
+            rs = st.executeQuery(sql);
+            fw = new FileWriter(path + "bigdata_naivebayes_features.dict");
+            while (rs.next()) {
+                r1 = rs.getString(1);
+                r2 = rs.getString(2);
+                fw.write(encrypDES.encrytor(r1+ "|" + r2));
                 fw.write("\n");
             }
             fw.close();
 
-//            //规则过滤信息
-//            sql = "SELECT idx_name, set_name, set_status, min_value, max_value, standard_value, " +
-//                    " , unit, concept_text, crisis_status, remind FROM kl_rule_new ";
-//            st = conn.createStatement();
-//            rs = st.executeQuery(sql);
-//            fw = new FileWriter(path + "bigdata_rule_filter.dict");
-//            String r10;
-//            while (rs.next()) {
+            sql = "select rdn, feature_name as diagnose from doc_feature where feature_type=2";
+            st = conn.createStatement();
+            rs = st.executeQuery(sql);
+            fw = new FileWriter(path + "bigdata_naivebayes_diagnose.dict");
+            while (rs.next()) {
+                r1 = rs.getString(1);
+                r2 = rs.getString(2);
+                fw.write(encrypDES.encrytor(r1+ "|" + r2));
+                fw.write("\n");
+            }
+            fw.close();
+
+            sql = "SELECT diagnose,feature FROM doc_relevant_feature;";
+            st = conn.createStatement();
+            rs = st.executeQuery(sql);
+            fw = new FileWriter(path + "bigdata_relevant_feature.dict");
+            while (rs.next()) {
+                r1 = rs.getString(1);
+                r2 = rs.getString(2);
+                fw.write(encrypDES.encrytor(r1+ "|" + r2));
+                fw.write("\n");
+            }
+            fw.close();
+
+            //化验辅检体征性别年龄
+            sql = "SELECT k1.lib_name, k1.lib_type, IFNULL(kcc.sex_type,3) sex_type, IFNULL(kcc.min_age, 0) min_age, IFNULL(kcc.max_age,200)  max_age\n" +
+                    "from kl_concept k1 LEFT JOIN kl_concept_common kcc on kcc.concept_id = k1.id \n" +
+                    "where  k1.lib_type in (1, 18,12,16,33,35)\n" +
+                    "AND k1.is_deleted = 'N'\n";
+            st = conn.createStatement();
+            rs = st.executeQuery(sql);
+            fw = new FileWriter(path + "bigdata_lpv_sex_age_filter.dict");//化验辅检体征相关文件
+            while (rs.next()) {
 //                r1 = rs.getString(1);
-//                r2 = rs.getString(2);
-//                r3 = rs.getString(3);
-//                r4 = rs.getString(4);
-//                r5 = rs.getString(5);
-//                r6 = rs.getString(6);
-//                r7 = rs.getString(7);
-//                r8 = rs.getString(8);
-//                r9 = rs.getString(9);
-//                r10 = rs.getString(10);
-//                r1 = StringUtils.isEmpty(r1)?"":r1;
-//                r2 = StringUtils.isEmpty(r1)?"":r2;
-//                r3 = StringUtils.isEmpty(r1)?"":r3;
-//                r4 = StringUtils.isEmpty(r1)?"":r4;
-//                r5 = StringUtils.isEmpty(r1)?"":r5;
-//                r6 = StringUtils.isEmpty(r1)?"":r6;
-//                r7 = StringUtils.isEmpty(r1)?"":r7;
-//                r8 = StringUtils.isEmpty(r1)?"":r8;
-//                r9 = StringUtils.isEmpty(r1)?"":r9;
-//                r10 = StringUtils.isEmpty(r1)?"":r10;
-//                fw.write(encrypDES.encrytor(r1+ "|" + r2 + "|" + r3 + "|" + r4 + "|" + r5
-//                        + "|" + r6 + "|" + r7 + "|" + r8 + "|" + r9 + "|" + r10));
-//                fw.write("\n");
-//            }
-//            fw.close();
+                r1 = CryptUtil.decrypt_char(rs.getString(1));
+                r2 = rs.getString(2);
+                r3 = rs.getString(3);
+                r4 = rs.getString(4);
+                r5 = rs.getString(5);
+                if ("18".equals(r2)) {//诊断
+                    r2 = "2";
+                }
+                if ("12".equals(r2)) {//化验
+                    r2 = "4";
+                }
+                if ("16".equals(r2)) {//辅检
+                    r2 = "5";
+                }
+                if ("33".equals(r2) | "35".equals(r2)) {//体征
+                    r2 = "3";
+                }
+                fw.write(encrypDES.encrytor(r1 + "|" + r2 + "|" + r3 + "|" + r4 + "|" + r5));
+                fw.write("\n");
+            }
+            fw.close();
+
+            //特征提取过滤参照表信息
+            sql = "SELECT id,diagnose,feature,feature_type,value_type,find_suspect_diagnose FROM `doc_relevant_feature`";
+            st = conn.createStatement();
+            rs = st.executeQuery(sql);
+            fw = new FileWriter(path + "bigdata_diagnose_feature_filter.dict");
+            while (rs.next()) {
+                r1 = String.valueOf(rs.getInt(1));
+                r2 = rs.getString(2);
+                r3 = rs.getString(3);
+                r4 = rs.getString(4);
+                r5 = rs.getString(5);
+                r6 = rs.getString(6);
+                r1 = StringUtils.isEmpty(r1) ? "" : r1;
+                r2 = StringUtils.isEmpty(r2) ? "" : r2;
+                r3 = StringUtils.isEmpty(r3) ? "" : r3;
+                r4 = StringUtils.isEmpty(r4) ? "" : r4;
+                r5 = StringUtils.isEmpty(r5) ? "" : r5;
+                r6 = StringUtils.isEmpty(r6) ? "" : r6;
+                fw.write(encrypDES.encrytor(r1 + "|" + r2 + "|" + r3 + "|" + r4 + "|" + r5
+                        + "|" + r6 ));
+                fw.write("\n");
+            }
+            fw.close();
+
         } catch (IOException ioe) {
             ioe.printStackTrace();
         } catch (SQLException sqle) {
             sqle.printStackTrace();
-        }  catch (Exception e) {
+        } catch (Exception e) {
             e.printStackTrace();
         } finally {
             nlpJdbc.close(rs, st, conn);
         }
     }
 
-    private List<Map.Entry<String, String>> rsToMap(ResultSet rs, boolean isJoin) throws SQLException{
+    private List<Map.Entry<String, String>> rsToMap(ResultSet rs, boolean isJoin) throws SQLException {
         String r1 = "";
         String r2 = "";
         Map<String, String> libraryMap = new HashMap<>(10);
         while (rs.next()) {
-            r1 = rs.getString(1);
-            r2 = rs.getString(2);
+//            r1 = rs.getString(1);
+//            r2 = rs.getString(2);
+            r1 = CryptUtil.decrypt_char(rs.getString(1));
+            r2 = CryptUtil.decrypt_char(rs.getString(2));
             if (libraryMap.get(r1) == null) {
                 libraryMap.put(r1, r2);
             } else if (isJoin && libraryMap.get(r1) != null) {

+ 1 - 1
graph/src/main/java/org/diagbot/graph/util/CacheUtil.java

@@ -1,4 +1,4 @@
-package org.diagbot.graph.util;
+package org.diagbot.common.push.cache;
 
 import org.apache.commons.lang3.StringUtils;
 import org.diagbot.nlp.participle.cfg.Configuration;

+ 106 - 43
common-push/src/main/java/org/diagbot/common/push/filter/ClassifyDiag.java

@@ -2,9 +2,10 @@ package org.diagbot.common.push.filter;
 
 import com.alibaba.fastjson.JSONObject;
 import org.apache.commons.lang3.StringUtils;
-import org.diagbot.common.work.FeatureRate;
-import org.diagbot.common.work.ResponseData;
-import org.diagbot.graph.util.CacheUtil;
+import org.diagbot.common.push.bean.FeatureRate;
+import org.diagbot.common.push.bean.ResponseData;
+import org.diagbot.common.push.cache.CacheUtil;
+import org.diagbot.nlp.relation.module.Lis;
 
 import java.text.DecimalFormat;
 import java.util.*;
@@ -21,41 +22,13 @@ public class ClassifyDiag {
     //每个诊断所在的级别缓存
     Map<String, Integer> diagClassifyJiCache = CacheUtil.getDiagClassifyJiCache();
 
-   /* public static void main(String[] args) {
-
-        List<String> disList = new LinkedList<>();
-        String[] disArray = {"急性胰腺炎","冠心病","急性冠状动脉综合征","急性非ST段抬高型心肌梗死","急性ST段抬高型心肌梗死"
-        ,"三度房室传导阻滞","心力衰竭","急性心力衰竭"};
-         disList = Arrays.asList(disArray);
-        System.out.println("输入的诊断"+disList);
-        List<FeatureRate> inintFeature = new ArrayList<>();
-        Double inintNumber = 0.95;
-        String rate = "";
-        for (String dis:disList) {
-            FeatureRate featureRate = new FeatureRate();
-            featureRate.setFeatureName(dis);
-            inintNumber = inintNumber - 0.1;
-            rate = String.valueOf(inintNumber);
-            featureRate.setRate(rate);
-            inintFeature.add(featureRate);
-        }
-
-        ClassifyDiag classifyDiag = new ClassifyDiag();
-        List<FeatureRate> classify = classifyDiag.diagClassify(inintFeature);
-        System.out.println("hao hai you");
-        for (FeatureRate d:classify) {
-            System.out.println(d.getFeatureName()+"\t"+d.getRate()+"\t"+d.getExtraProperty());
-        }
-
-    }*/
-
     /**
      * 根据诊断依据规则过滤诊断
      * @param graphResponseData 把过滤的诊断包装在这个对象里
      * @param graphFeatures 过滤前的诊断结果
      * @return 返回过滤后的诊断结果
      */
-    public List<FeatureRate> filterDiag(ResponseData graphResponseData,List<FeatureRate> graphFeatures){
+    public List<FeatureRate> filterDiag(ResponseData graphResponseData, List<FeatureRate> graphFeatures){
         //根据诊断依据规则过滤掉的诊断列表
         List<String> excludeDiag = graphResponseData.getExcludeDiag();
         //将需要排除的诊断从列表中删除
@@ -81,6 +54,7 @@ public class ClassifyDiag {
     public List<FeatureRate> diagClassify(List<FeatureRate> updateFeatures){
         List<FeatureRate> finalDiagList = new LinkedList<>();//最终返回
         List<String> highDiagList = new LinkedList<>();//警惕集合
+        List<String> diffDiagList = new LinkedList<>();//鉴别诊断集合
         List<String> queDiagList = new LinkedList<>();//确诊集合
         List<String> bigDiagList = new LinkedList<>();//可能诊断集合
         if(updateFeatures != null && updateFeatures.size()>0){
@@ -89,13 +63,11 @@ public class ClassifyDiag {
                 String desc = featureRate.getDesc();
                 Map<String,Object> d = new HashMap<>();
                 if(desc != null){
-                    JSONObject jsonObject = JSONObject.parseObject(desc);
-                    d = jsonObject;
-                    if(d.keySet().size() == 1 && "警惕".equals(d.keySet().toArray()[0])){
-                        highDiagList.add(featureName);
-                    }else {
-                        queDiagList.add(featureName);
-                    }
+                   if(desc.contains("确诊") || desc.contains("拟诊")){
+                       queDiagList.add(featureName);
+                   }else {
+                       highDiagList.add(featureName);
+                   }
                 }else {
                     bigDiagList.add(featureName);
                 }
@@ -107,7 +79,18 @@ public class ClassifyDiag {
                 FeatureRate featureRate = updateFeatures.get(j);
                 String featureName = featureRate.getFeatureName();
                 int i = highDiagList.indexOf(featureName);
-                if(i >= 0){
+                if(i >= 0 && featureRate.getDesc() != null){
+                    finalDiagList.add(featureRate);
+                }
+            }
+        }
+        //再把鉴别诊断加进去
+        if(diffDiagList.size()>0){
+            for(int j =0;j<updateFeatures.size();j++){
+                FeatureRate featureRate = updateFeatures.get(j);
+                String featureName = featureRate.getFeatureName();
+                int i = diffDiagList.indexOf(featureName);
+                if(i >= 0 && featureRate.getDesc() != null){
                     finalDiagList.add(featureRate);
                 }
             }
@@ -121,12 +104,16 @@ public class ClassifyDiag {
         System.out.println("图谱归一前数据 :"+queDiagList);
         if(queDiagList != null && queDiagList.size()>0){
             //图谱归一 ,图谱sign =0,大数据sign = 1
-            queSet = this.diagProcess(queDiagList,0);
+            List<String> que = this.processQue(updateFeatures, queDiagList);
+            queSet = this.diagProcess(que,0);
             System.out.println("图谱归一后的数据    :"+queSet);
             if(queSet != null && queSet.size()>0){
                 for (String queDis:queSet) {
                     if(queDiagList.indexOf(queDis)>=0){ //可以找到,就取出来,用原来的
                         FeatureRate feature = this.getFeature(updateFeatures, queDis);
+//                        if(feature.getDesc().contains("拟诊")){
+                            feature.setDesc(feature.getDesc());
+//                        }
                         feature.setExtraProperty(diagDepartCache.get(queDis));
                         finalDiagList.add(feature);
                     }else {
@@ -186,6 +173,8 @@ public class ClassifyDiag {
                             bigDataIDiagList.add(featureName);
                         }else if("Ⅱ".equals(s)){
                             bigDataIIDiagList.add(featureName);
+                        }else {
+                            bigDataIDiagList.add(featureName);
                         }
                     }else {
                         bigDataIDiagList.add(featureName);
@@ -193,13 +182,23 @@ public class ClassifyDiag {
                 }
             }
         }
-        finalDiagList.addAll(neoDiagList);
+//        finalDiagList.addAll(neoDiagList);
         finalDiagList.addAll(bigDataIDiagList);
         finalDiagList.addAll(bigDataIIDiagList);
+        if(neoDiagList !=null && neoDiagList.size()>0){
+            for (String diag:neoDiagList) {
+                for (FeatureRate feature:updateFeatureRates) {
+                    if(diag.equals(feature.getFeatureName())&& "neo4j".equals(feature.getSource())){
+                        finalDiagFeature.add(feature);
+                        break;
+                    }
+                }
+            }
+        }
         if(finalDiagList != null && finalDiagList.size()>0){
             for (String diag:finalDiagList){
                 for (FeatureRate f:updateFeatureRates) {
-                    if(diag.equals(f.getFeatureName())){
+                    if(diag.equals(f.getFeatureName())&& f.getSource()==null){
                         finalDiagFeature.add(f);
                         break;
                     }
@@ -223,6 +222,13 @@ public class ClassifyDiag {
                 String s = diagClassifyCache.get(dis);
                 if(s != null){
                     queAll.add(s);
+                    List<Object> key = this.getKey(diagClassifyCache, s);
+                    if(key.size()>0){
+                        for (Object o:key
+                             ) {
+                            queAll.add(o.toString());
+                        }
+                    }
                 }
                 queAll.add(dis);
                 List<Object> key = this.getKey(diagClassifyCache, dis);
@@ -520,5 +526,62 @@ public class ClassifyDiag {
         }
         return arrayList;
     }
+    //归一有确诊的诊断
+    public  List<String> processQue(List<FeatureRate> updateFeatures,List<String>queList){
+        List<String> finallyQue = new LinkedList<>();
+        List<String> que = new ArrayList<>();
+        List<String> ni = new ArrayList<>();
+        for (String qd:queList) {
+            for (FeatureRate f:updateFeatures) {
+                if("neo4j".equals(f.getSource()) && qd.equals(f.getFeatureName())){
+                    if(f.getDesc().contains("确诊")){
+                        que.add(qd);
+                    }else if(f.getDesc().contains("拟诊")) {
+                        ni.add(qd);
+                    }
+                }
+            }
+        }
+        if(que != null && que.size()>0){
+            for (String q:que) {
+                Set<String> classifySet = new HashSet<>();
+                String s = diagClassifyCache.get(q);
+                if(StringUtils.isNotEmpty(s)){
+                    classifySet.add(s);
+                    List<Object> key = this.getKey(diagClassifyCache, s);
+                    if(key != null && key.size()>0){
+                        for (Object o:key) {
+                            classifySet.add(o.toString());
+                            List<Object> key1 = this.getKey(diagClassifyCache, o.toString());
+                            if(key1 != null && key1.size()>0){
+                                for (Object f:key1
+                                     ) {
+                                    classifySet.add(f.toString());
+                                }
+                            }
+                        }
+                    }
+                }
+                List<Object> key = this.getKey(diagClassifyCache, q);
+                if(key != null && key.size()>0){
+                    for (Object o:key) {
+                        classifySet.add(o.toString());
+                    }
+                }
+                if(classifySet != null && classifySet.size()>0){
+                    for (String sq:classifySet
+                         ) {
+                        if(ni.indexOf(sq) >= 0){
+                            ni.remove(sq);
+                        }
+                    }
+                }
+            }
+        }
+        finallyQue.addAll(que);
+        finallyQue.addAll(ni);
+        return finallyQue;
+
+    }
 
 }

+ 0 - 79
common-push/src/main/java/org/diagbot/common/push/filter/PreResult.java

@@ -1,79 +0,0 @@
-package org.diagbot.common.push.filter;
-
-/**
- * Created by louhr on 2019/8/31.
- */
-public class PreResult {
-    private String detailName;
-    private String uniqueName;
-    private Double maxValue;
-    private Double minValue;
-    private String name;
-    private String otherValue;
-    private String units;
-    private Double value;
-
-    public String getDetailName() {
-        return detailName;
-    }
-
-    public void setDetailName(String detailName) {
-        this.detailName = detailName;
-    }
-
-    public String getUniqueName() {
-        return uniqueName;
-    }
-
-    public void setUniqueName(String uniqueName) {
-        this.uniqueName = uniqueName;
-    }
-
-    public Double getMaxValue() {
-        return maxValue;
-    }
-
-    public void setMaxValue(Double maxValue) {
-        this.maxValue = maxValue;
-    }
-
-    public Double getMinValue() {
-        return minValue;
-    }
-
-    public void setMinValue(Double minValue) {
-        this.minValue = minValue;
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public void setName(String name) {
-        this.name = name;
-    }
-
-    public String getOtherValue() {
-        return otherValue;
-    }
-
-    public void setOtherValue(String otherValue) {
-        this.otherValue = otherValue;
-    }
-
-    public String getUnits() {
-        return units;
-    }
-
-    public void setUnits(String units) {
-        this.units = units;
-    }
-
-    public Double getValue() {
-        return value;
-    }
-
-    public void setValue(Double value) {
-        this.value = value;
-    }
-}

+ 0 - 45
common-push/src/main/java/org/diagbot/common/push/filter/PretreatmentFilter.java

@@ -1,45 +0,0 @@
-package org.diagbot.common.push.filter;
-
-
-import org.diagbot.common.push.filter.pretreat.Pretreatment;
-import org.diagbot.common.push.filter.pretreat.PretreatmentSymptom;
-import org.diagbot.common.push.filter.pretreat.PretreatmentVital;
-import org.diagbot.common.work.SearchData;
-import org.springframework.util.StringUtils;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class PretreatmentFilter {
-    public void crisisFilter(SearchData searchData) throws java.io.IOException {
-        //将数值类型内容全部转换为标准术语  依据kl_rule提供规则
-
-        List<PreResult> allPreResultList = new ArrayList<>();
-        //症状数据
-        if (!StringUtils.isEmpty(searchData.getSymptom())) {
-            searchData.setSymptom(add2PreResultList(new PretreatmentSymptom(), searchData.getSymptom(), allPreResultList));
-        }
-        //体征数据
-        if (!StringUtils.isEmpty(searchData.getVital())) {
-            searchData.setVital(add2PreResultList(new PretreatmentVital(), searchData.getVital(), allPreResultList));
-        }
-        //lis文本非结构化数据
-        if (!StringUtils.isEmpty(searchData.getLis())) {
-            searchData.setLis(add2PreResultList(new PretreatmentVital(), searchData.getLis(), allPreResultList));
-        }
-        //pacs数据
-        if (!StringUtils.isEmpty(searchData.getPacs())) {
-            searchData.setPacs(add2PreResultList(new PretreatmentVital(), searchData.getPacs(), allPreResultList));
-        }
-    }
-
-    private String add2PreResultList(Pretreatment pretreatment, String content, List<PreResult> allPreResultList) throws java.io.IOException {
-        List<PreResult> preResultList = pretreatment.analyze(content);
-        if (preResultList != null) {
-            allPreResultList.addAll(preResultList);
-
-
-        }
-        return content;
-    }
-}

+ 0 - 41
common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentLis.java

@@ -1,41 +0,0 @@
-package org.diagbot.common.push.filter.pretreat;
-
-import org.diagbot.common.push.filter.PreResult;
-import org.diagbot.nlp.participle.word.Lexeme;
-import org.diagbot.nlp.participle.word.LexemePath;
-import org.diagbot.nlp.util.NegativeEnum;
-import org.diagbot.nlp.util.NlpUtil;
-import org.springframework.util.StringUtils;
-
-import java.util.List;
-
-public class PretreatmentLis extends Pretreatment {
-    public List<PreResult> analyze(String content) throws java.io.IOException{
-        return super.analyzeDefault(content);
-    }
-
-    public PreResult createPreResult(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
-        PreResult result = new PreResult();
-        double value = findNumberValue(lexemes, lexeme, index);
-        if (value == -1) return null;
-        //继续往前找化验明细项
-        if (cursor > 0) cursor--;
-        Lexeme leftLexeme = lexemes.get(cursor);
-        if (NlpUtil.isFeature(leftLexeme.getProperty(), new NegativeEnum[]{NegativeEnum.LIS_NAME})) {
-            result.setDetailName(leftLexeme.getText());
-        } else {
-            return null;
-        }
-        //查找化验套餐
-        int position = cursor - 1;
-        while (position > -1) {
-            leftLexeme = lexemes.get(position);
-            if (NlpUtil.isFeature(leftLexeme.getProperty(), new NegativeEnum[]{NegativeEnum.LIS_TYPE})) {
-                result.setName(leftLexeme.getText());
-                break;
-            }
-            position--;
-        }
-        return result;
-    }
-}

+ 0 - 17
common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentSymptom.java

@@ -1,17 +0,0 @@
-package org.diagbot.common.push.filter.pretreat;
-
-import org.diagbot.common.push.filter.PreResult;
-import org.diagbot.nlp.participle.word.Lexeme;
-import org.diagbot.nlp.participle.word.LexemePath;
-
-import java.util.List;
-
-public class PretreatmentSymptom extends Pretreatment {
-    public List<PreResult> analyze(String content) throws java.io.IOException{
-        return super.analyzeDefault(content);
-    }
-
-    public PreResult createPreResult(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
-        return super.createDefaultPreResult(lexemes, lexeme, index);
-    }
-}

+ 0 - 17
common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentVital.java

@@ -1,17 +0,0 @@
-package org.diagbot.common.push.filter.pretreat;
-
-import org.diagbot.common.push.filter.PreResult;
-import org.diagbot.nlp.participle.word.Lexeme;
-import org.diagbot.nlp.participle.word.LexemePath;
-
-import java.util.List;
-
-public class PretreatmentVital extends Pretreatment {
-    public List<PreResult> analyze(String content) throws java.io.IOException{
-        return super.analyzeDefault(content);
-    }
-
-    public PreResult createPreResult(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
-        return super.createDefaultPreResult(lexemes, lexeme, index);
-    }
-}

+ 80 - 0
common-push/src/main/java/org/diagbot/common/push/filter/rule/CalcFormula.java

@@ -0,0 +1,80 @@
+package org.diagbot.common.push.filter.rule;
+
+import org.apache.commons.lang3.StringUtils;
+import org.diagbot.common.push.bean.SearchData;
+import org.diagbot.nlp.rule.module.PreResult;
+
+import java.util.List;
+
+/**
+ * @Description: 需要走规则的计算公式
+ * @Author: HUJING
+ * @Date: 2019/12/31 14:10
+ */
+public class CalcFormula {
+    public String gfrCalcMethod(SearchData searchData) {
+        String text = "";
+        List<PreResult> lis = searchData.getLis();
+        String crValue = "";
+        String units = "";
+        boolean hasCr = false;
+        for (PreResult preResult : lis) {
+            if ("肌酐(Cr)".equals(preResult.getDetailName()) && StringUtils.isNotEmpty(preResult.getValue())) {
+                crValue = preResult.getValue();
+                units = preResult.getUnits();
+                hasCr = true;
+            }
+        }
+        if (!hasCr) {
+            return text;
+        }
+
+        int age = searchData.getAge();
+        double scr = 0d;
+        float k = 0f;
+        double a = 0d;
+        double denger = 0d;
+
+        if ("umol/L".equals(units)) {
+            scr = Double.parseDouble(crValue) / 88.41;
+        } else if ("mg/dL".equals(units)) {
+            scr = Double.parseDouble(crValue);
+        }
+
+        String sex = searchData.getSex();
+        if ("1".equals(sex) || "M".equals(sex)) {
+            k = 0.9f;
+            denger = 1d;
+            if (scr <= 0.90) {
+                a = -0.411;
+            } else {
+                a = -1.209;
+            }
+        } else if ("2".equals(sex) || "F".equals(sex)) {
+            k = 0.7f;
+            denger = 1.018;
+            if (scr <= 0.70) {
+                a = -0.329;
+            } else {
+                a = -1.209;
+            }
+        }
+
+        double eGFR3 = 141 * Math.pow((scr / k), a) * Math.pow(0.993, age) * denger;
+
+        if (eGFR3 <= 0 || Double.POSITIVE_INFINITY == eGFR3) {
+            return text;
+        }
+
+        if (eGFR3 > 0 && eGFR3 <= 29) {
+            text = "重度肾功能不全";
+        } else if (eGFR3 > 29 && eGFR3 < 60) {
+            text = "中度肾功能不全";
+        } else if (eGFR3 >= 60 && eGFR3 <= 89) {
+            text = "轻度肾功能不全";
+        } else if (eGFR3 > 89) {
+            text = "肾功能正常";
+        }
+        return text;
+    }
+}

+ 335 - 0
common-push/src/main/java/org/diagbot/common/push/filter/rule/PretreatmentRule.java

@@ -0,0 +1,335 @@
+package org.diagbot.common.push.filter.rule;
+
+
+import org.diagbot.common.push.bean.Rule;
+import org.diagbot.common.push.bean.SearchData;
+import org.diagbot.common.push.cache.ApplicationCacheUtil;
+import org.diagbot.nlp.rule.module.PreResult;
+import org.diagbot.nlp.rule.pretreat.*;
+import org.diagbot.nlp.util.NlpUtil;
+import org.springframework.util.StringUtils;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+public class PretreatmentRule {
+    public void rule(SearchData searchData) throws java.io.IOException {
+        //患者基本信息 性别 年龄
+        if (!StringUtils.isEmpty(searchData.getNormal())) {
+            add2PreResultList(new PretreatmentNormal(), searchData.getNormal(), "normal", searchData);
+        }
+        //患者基本信息 婚姻
+        if (!StringUtils.isEmpty(searchData.getMarriage())) {
+            add2PreResultList(new PretreatmentNormal(), searchData.getMarriage(), "normal", searchData);
+        }
+        //症状规则 主诉、现病史提取
+        if (!StringUtils.isEmpty(searchData.getChief())) {
+            searchData.setChief(add2PreResultList(new PretreatmentSymptom(), searchData.getChief(), "symptom", searchData));
+        }
+        //现病史 提取手术史和过敏史
+        if (!StringUtils.isEmpty(searchData.getSymptom())) {
+            add2PreResultList(new PretreatmentOperation(), searchData.getSymptom(), "operation", searchData);   //手术史
+            add2PreResultList(new PretreatmentAllergy(), searchData.getSymptom(), "allergy", searchData);       //过敏史
+            add2PreResultList(new PretreatmentSymptom(), searchData.getSymptom(), "symptom", searchData);       //症状
+            add2PreResultList(new PretreatmentInfectious(), searchData.getSymptom(), "infectious", searchData);     //传染病史
+            add2PreResultList(new PretreatmentPacs(), searchData.getSymptom(), "pacs", searchData);         //检查
+        }
+        //体征数据 体征、现病史提取
+        if (!StringUtils.isEmpty(searchData.getVital())) {
+            searchData.setVital(add2PreResultList(new PretreatmentVital(), searchData.getVital(), "vital", searchData));
+        }
+        //历史诊断数据  诊断信息(历史)、现病史、既往史(重要疾病史)
+        if (searchData.getDiag() != null && searchData.getDiag().size() > 0) {
+            List<PreResult> preResults = searchData.getDiag();
+            for (PreResult result : preResults) {
+                result.setValue(result.getUniqueName());
+                result.setUniqueName("诊断--");
+            }
+            searchData.setDiagString(add2PreResultList(searchData.getDiag(), searchData.getDiagString(), "diag", searchData));
+        } else if (!StringUtils.isEmpty(searchData.getDiagString())) {
+            searchData.setDiagString(add2PreResultList(new PretreatmentDiag(), searchData.getDiagString(), "diag", searchData));
+        }
+        //药品数据  药品信息(历史)、现病史、既往史
+        if (searchData.getDrug() != null && searchData.getDrug().size() > 0) {
+            List<PreResult> preResults = searchData.getDrug();
+            for (PreResult result : preResults) {
+                result.setValue(result.getUniqueName());
+                result.setUniqueName("药品--");
+            }
+            searchData.setDrugString(add2PreResultList(searchData.getDrug(), searchData.getDrugString(), "drug", searchData));
+        } else if (!StringUtils.isEmpty(searchData.getDrugString())) {
+            searchData.setDrugString(add2PreResultList(new PretreatmentDrug(), searchData.getDrugString(), "drug", searchData));
+        }
+        //既往史
+        if (!StringUtils.isEmpty(searchData.getPasts())) {
+            add2PreResultList(new PretreatmentPast(), searchData.getPasts(), "pasts", searchData);
+            add2PreResultList(new PretreatmentOperation(), searchData.getPasts(), "operation", searchData);
+            add2PreResultList(new PretreatmentAllergy(), searchData.getPasts(), "allergy", searchData);
+            add2PreResultList(new PretreatmentDrug(), searchData.getPasts(), "drug", searchData);
+            add2PreResultList(new PretreatmentDiag(), searchData.getPasts(), "diag", searchData);
+            add2PreResultList(new PretreatmentInfectious(), searchData.getPasts(), "infectious", searchData);
+        }
+        //手术外伤史 现病史、手术史
+        if (!StringUtils.isEmpty(searchData.getOperation())) {
+            add2PreResultList(new PretreatmentOperation(), searchData.getOperation(), "operation", searchData);
+            add2PreResultList(new PretreatmentWound(), searchData.getOperation(), "wound", searchData);
+        }
+        //过敏 现病史、过敏史
+        if (!StringUtils.isEmpty(searchData.getAllergy())) {
+            add2PreResultList(new PretreatmentAllergy(), searchData.getAllergy(), "allergy", searchData);
+        }
+        //传染病   现病史、传染病史
+        if (!StringUtils.isEmpty(searchData.getInfectious())) {
+            add2PreResultList(new PretreatmentInfectious(), searchData.getInfectious(), "infectious", searchData);
+        }
+        //个人史
+        if (!StringUtils.isEmpty(searchData.getPersonal())) {
+            add2PreResultList(new PretreatmentPersonal(), searchData.getPersonal(), "personal", searchData);
+        }
+        //家族史
+        if (!StringUtils.isEmpty(searchData.getFamily())) {
+            add2PreResultList(new PretreatmentFamily(), searchData.getFamily(), "family", searchData);
+        }
+        //接种史
+        if (!StringUtils.isEmpty(searchData.getVaccination())) {
+            add2PreResultList(new PretreatmentVaccination(), searchData.getVaccination(), "vaccination", searchData);
+        }
+        //其他史
+        if (!StringUtils.isEmpty(searchData.getOther())) {
+            add2PreResultList(new PretreatmentPast(), searchData.getOther(), "pasts", searchData);
+            add2PreResultList(new PretreatmentOperation(), searchData.getOther(), "operation", searchData);
+            add2PreResultList(new PretreatmentAllergy(), searchData.getOther(), "allergy", searchData);
+            add2PreResultList(new PretreatmentWound(), searchData.getOther(), "wound", searchData);
+            add2PreResultList(new PretreatmentInfectious(), searchData.getOther(), "infectious", searchData);
+            add2PreResultList(new PretreatmentPersonal(), searchData.getOther(), "personal", searchData);
+            add2PreResultList(new PretreatmentFamily(), searchData.getOther(), "family", searchData);
+            add2PreResultList(new PretreatmentVaccination(), searchData.getOther(), "vaccination", searchData);
+            add2PreResultList(new PretreatmentDrug(), searchData.getOther(), "drug", searchData);
+            add2PreResultList(new PretreatmentDiag(), searchData.getOther(), "diag", searchData);
+        }
+        //lis结构化信息
+        if (searchData.getLis() != null && searchData.getLis().size() > 0) {
+            List<PreResult> preResults = searchData.getLis();
+            for (PreResult result : preResults) {
+                result.setUniqueName("化验--" + result.getUniqueName());
+            }
+            searchData.setLisString(add2PreResultList(searchData.getLis(), searchData.getLisString(), "lis", searchData));
+        } else if (!StringUtils.isEmpty(searchData.getLisString())) {
+            searchData.setLisString(add2PreResultList(new PretreatmentLis(), searchData.getLisString(), "lis", searchData));
+        }
+        //pacs结构化信息
+        if (searchData.getPacs() != null && searchData.getPacs().size() > 0) {
+            List<PreResult> preResults = searchData.getPacs();
+            for (PreResult result : preResults) {
+                result.setValue(result.getResult());
+                result.setUniqueName("检查--");
+            }
+            searchData.setPacsString(add2PreResultList(searchData.getPacs(), searchData.getPacsString(), "pacs", searchData));
+        } else if (!StringUtils.isEmpty(searchData.getPacsString())) {
+            searchData.setPacsString(add2PreResultList(new PretreatmentPacs(), searchData.getPacsString(), "pacs", searchData));
+        }
+
+        //开具 化验
+        if (searchData.getLisOrder() != null && searchData.getLisOrder().size() > 0) {
+            List<PreResult> preResults = searchData.getLisOrder();
+            for (PreResult result : preResults) {
+                result.setValue(result.getUniqueName());
+                result.setHisName(result.getName());
+                result.setUniqueName("开单--");
+            }
+            add2PreResultList(searchData.getLisOrder(), "", "lisOrder", searchData);
+        }
+        //开具 检查
+        if (searchData.getPacsOrder() != null && searchData.getPacsOrder().size() > 0) {
+            List<PreResult> preResults = searchData.getPacsOrder();
+            for (PreResult result : preResults) {
+                result.setValue(result.getUniqueName());
+                result.setHisName(result.getName());
+                result.setUniqueName("开单--");
+            }
+            add2PreResultList(searchData.getPacsOrder(), "", "pacsOrder", searchData);
+        }
+        //开具 诊断
+        if (searchData.getDiagOrder() != null && searchData.getDiagOrder().size() > 0) {
+            List<PreResult> preResults = searchData.getDiagOrder();
+            for (PreResult result : preResults) {
+                result.setValue(result.getUniqueName());
+                result.setUniqueName("开单--");
+            }
+            add2PreResultList(searchData.getDiagOrder(), "", "diagOrder", searchData);
+        }
+        //开具 药品
+        if (searchData.getDrugOrder() != null && searchData.getDrugOrder().size() > 0) {
+            List<PreResult> preResults = searchData.getDrugOrder();
+            for (PreResult result : preResults) {
+                result.setValue(result.getUniqueName());
+                result.setUniqueName("开单--");
+            }
+            add2PreResultList(searchData.getDrugOrder(), "", "drugOrder", searchData);
+        }
+        //开具 手术
+        if (searchData.getOperationOrder() != null && searchData.getOperationOrder().size() > 0) {
+            List<PreResult> preResults = searchData.getOperationOrder();
+            for (PreResult result : preResults) {
+                result.setValue(result.getUniqueName());
+                result.setHisName(result.getName());
+                result.setUniqueName("开单--");
+            }
+            add2PreResultList(searchData.getOperationOrder(), "", "operationOrder", searchData);
+        }
+
+//        Map<String, List<Rule>> ruleMap = searchData.getRules();
+//        for (Map.Entry<String, List<Rule>> entry : ruleMap.entrySet()) {
+//            for (Rule rule : entry.getValue()) {
+//                System.out.println("id:" + rule.getId() + "; pub_name: " + rule.getPub_name());
+//            }
+//        }
+        if (searchData.getLis() != null && searchData.getLis().size() > 0) {
+            //计算GFR值
+            CalcFormula calcFormula = new CalcFormula();
+            String gfrResult = calcFormula.gfrCalcMethod(searchData);
+            if (!StringUtils.isEmpty(gfrResult)) {
+                PreResult preResult = new PreResult();
+                preResult.setValue(gfrResult);
+                preResult.setUniqueName("检查--");
+                searchData.getPacs().add(preResult);
+                searchData.setPacsString(add2PreResultList(searchData.getPacs(), searchData.getPacsString(), "pacs", searchData));
+            }
+        }
+    }
+
+    private String add2PreResultList(Pretreatment pretreatment, String content, String inputType, SearchData searchData) throws java.io.IOException {
+        List<PreResult> preResultList = pretreatment.analyze(content);
+        return add2PreResultList(preResultList, content, inputType, searchData);
+    }
+
+    private String add2PreResultList(List<PreResult> preResultList, String content, String inputType, SearchData searchData) throws java.io.IOException {
+        Map<String, List<Rule>> kl_rule_filter_map = ApplicationCacheUtil.getKl_rule_filter_map();
+        //符合条件的规则
+        Map<String, List<Rule>> accord_rule_map = searchData.getRules();
+        List<Rule> accord_rules = null;
+        if (preResultList != null) {
+            for (PreResult result : preResultList) {
+                //规则库中匹配
+                if (kl_rule_filter_map.get(result.getUniqueName()) != null) {
+                    List<Rule> rules = kl_rule_filter_map.get(result.getUniqueName());
+                    if (rules == null) {
+                        continue;
+                    }
+                    for (Rule rule : rules) {
+                        boolean isSuit = suitRule(result, rule, content);
+                        if (isSuit) {
+                            if (!StringUtils.isEmpty(rule.getOriginText())) {
+                                rule.setOriginText("");
+                            }
+                            if (!StringUtils.isEmpty(result.getDateValue())) {
+                                rule.setOriginText(result.getDateValue());
+                            }
+                            if (NlpUtil.isNumberString(result.getValue())) {
+                                if (StringUtils.isEmpty(rule.getOriginText())) {
+                                    rule.setOriginText(result.getName() + "--" + result.getDetailName() + ":" + result.getValue() + result.getUnits());
+                                } else {
+                                    rule.setOriginText(rule.getOriginText() + " " + result.getName() + "--" + result.getDetailName() + ":" + result.getValue() + result.getUnits());
+                                }
+                            } else {
+                                if (StringUtils.isEmpty(rule.getOriginText())) {
+                                    rule.setOriginText(result.getValue());
+                                } else {
+                                    rule.setOriginText(rule.getOriginText() + " " + result.getValue());
+                                }
+                            }
+                            if (accord_rule_map.get(inputType) == null) {
+                                accord_rules = new ArrayList<>();
+                            } else {
+                                accord_rules = accord_rule_map.get(inputType);
+                            }
+                            //在满足规则里存放his项目名称
+                            rule.setHisName(result.getHisName());
+                            accord_rules.add(rule);
+                            accord_rule_map.put(inputType, accord_rules);
+                            searchData.setRules(accord_rule_map);
+
+                            content = content + (rule.getSuffixInfo() == null ? "" : rule.getSuffixInfo());
+                        }
+                    }
+                }
+            }
+
+        }
+        return content;
+    }
+
+    private boolean suitRule(PreResult result, Rule rule, String content) {
+        //标准值最优先匹配
+        if (org.apache.commons.lang3.StringUtils.isNotEmpty(rule.getEq_value())) {
+            if (compareEqual(result.getValue(), rule.getEq_value())) {
+                return true;
+            } else {
+                return false;
+            }
+        } else if (org.apache.commons.lang3.StringUtils.isNotEmpty(rule.getMax_value()) && org.apache.commons.lang3.StringUtils.isNotEmpty(rule.getMin_value())) {
+            if (compareMin(result.getValue(), rule.getMax_value(), rule.getMax_operator()) //比最大值小
+                    && compareMax(result.getValue(), rule.getMin_value(), rule.getMin_operator()) //比最小值大
+                    && result.getUnits().equals(rule.getMin_unit())
+                    && result.getUnits().equals(rule.getMax_unit())) {
+                return true;
+            } else {
+                return false;
+            }
+        } else if (org.apache.commons.lang3.StringUtils.isNotEmpty(rule.getMin_value())) {
+            if (compareMin(result.getValue(), rule.getMin_value(), rule.getMin_operator())   //比最小值小
+                    && result.getUnits().equals(rule.getMin_unit())) {
+                return true;
+            } else {
+                return false;
+            }
+        } else if(org.apache.commons.lang3.StringUtils.isNotEmpty(rule.getMax_value()))  {
+            if (compareMax(result.getValue(), rule.getMax_value(), rule.getMax_operator())   //比最大值大
+                    && result.getUnits().equals(rule.getMax_unit())) {
+                return true;
+            } else {
+                return false;
+            }
+        }
+        return false;
+    }
+
+
+    private boolean compareEqual(String c1, String c2) {
+        if (!StringUtils.isEmpty(c1) && !StringUtils.isEmpty(c2)
+                && c1.equals(c2)) {
+            return true;
+        }
+        return false;
+    }
+
+    private boolean compareMin(String c1, String c2, String operator) {
+        if (!StringUtils.isEmpty(c1) && !StringUtils.isEmpty(c2) && !StringUtils.isEmpty(operator)) {
+            try {
+                if (operator.contains("=")) {
+                    return Double.valueOf(c1) <= Double.valueOf(c2);
+                } else {
+                    return Double.valueOf(c1) < Double.valueOf(c2);
+                }
+            } catch (Exception e) {
+            }
+        }
+        return false;
+    }
+
+    private boolean compareMax(String c1, String c2, String operator) {
+        if (!StringUtils.isEmpty(c1) && !StringUtils.isEmpty(c2) && !StringUtils.isEmpty(operator)) {
+            try {
+                if (operator.contains("=")) {
+                    return Double.valueOf(c1) >= Double.valueOf(c2);
+                } else {
+                    return Double.valueOf(c1) > Double.valueOf(c2);
+                }
+            } catch (Exception e) {
+            }
+        }
+        return false;
+    }
+
+}

+ 30 - 0
common-push/src/main/java/org/diagbot/common/push/naivebayes/NaiveBayesTest.java

@@ -0,0 +1,30 @@
+package org.diagbot.common.push.naivebayes;
+
+import org.diagbot.common.push.naivebayes.core.AlgorithmNaiveBayesExecutor;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * @Description:
+ * @Author: HUJING
+ * @Date: 2019/10/11 14:30
+ */
+public class NaiveBayesTest {
+    public static void main(String[] args) {
+        AlgorithmNaiveBayesExecutor a = new AlgorithmNaiveBayesExecutor();
+        Map<String, Map<String, String>> inputs = new HashMap<>();
+        inputs.put("咽部异物感",new HashMap<>());
+//        inputs.put("腹胀",new HashMap<>());
+//        inputs.put("乏力",new HashMap<>());
+        Map<String, Float> softmax = a.execute(inputs);
+        double i = 0.00;
+        for (Map.Entry<String, Float> s:softmax.entrySet()) {
+            i += s.getValue();
+            if (s.getValue() == 0){
+                System.out.println(s.getKey());
+            }
+        }
+        System.out.println(i);
+    }
+}

+ 92 - 0
common-push/src/main/java/org/diagbot/common/push/naivebayes/core/AlgorithmNaiveBayesExecutor.java

@@ -0,0 +1,92 @@
+package org.diagbot.common.push.naivebayes.core;
+
+import org.algorithm.core.AlgorithmExecutor;
+import org.diagbot.common.push.cache.ApplicationCacheUtil;
+
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * @Description:
+ * @Author: HUJING
+ * @Date: 2019/10/11 14:25
+ */
+public class AlgorithmNaiveBayesExecutor extends AlgorithmExecutor {
+    private double e = Math.E;
+    private static double unknownProbWithRelevant = -2; //已知有关,但未在病历中统计出来的特征
+    private static double unknownProbWithoutRelevant = -6;  //无关事件间的共现概率
+    private static double denominator = 0.00;
+
+    public Map<String, Float> execute(Map<String, Map<String, String>> inputs) {
+        return softmax(probCalc(inputs));
+    }
+
+    public Map<String, Float> probCalc(Map<String, Map<String, String>> inputs) {
+        Map<String, Map<String, Float>> doc_feature_naivebayes_prob_map = ApplicationCacheUtil.getDoc_feature_naivebayes_prob_map();
+        Map<String, Map<String, Float>> relevant_feature_map = ApplicationCacheUtil.getRelevant_feature_map();
+        Map<String, Float> naivebayesResult = new HashMap<>();
+        for (Map.Entry<String, Map<String, Float>> naivebayesProb : doc_feature_naivebayes_prob_map.entrySet()) {
+            float sum = 0.00f;
+            int i = 1;
+            for (String input : inputs.keySet()) {
+                //先验概率表里有该特征,就使用该特征的先验概率
+                if (naivebayesProb.getValue().containsKey(input)) {
+                    sum += Math.log10(naivebayesProb.getValue().get(input));
+                } else if (relevant_feature_map.get(naivebayesProb.getKey()) != null &&
+                        relevant_feature_map.get(naivebayesProb.getKey()).containsKey(input)) {
+                    //先验概率表里没有该特征 但 关联规则表里有该特征,则平滑处理(默认此时先验概率为10^-2)
+                    sum += unknownProbWithRelevant;
+                } else {
+                    sum += unknownProbWithoutRelevant;
+                }
+
+                if (i == inputs.size()) {
+                    sum += Math.log10(naivebayesProb.getValue().get("priorProb"));
+                    naivebayesResult.put(naivebayesProb.getKey(), sum);
+                }
+                i++;
+            }
+        }
+//        naivebayesResult = sortMap(naivebayesResult);
+        return naivebayesResult;
+    }
+
+    private Map<String, Float> softmax(Map<String, Float> naivebayesResultMap) {
+        Map<String, Float> softmaxResult = new HashMap<>();
+        calaDenominator(naivebayesResultMap);
+
+        for (Map.Entry<String, Float> naivebayesResult : naivebayesResultMap.entrySet()) {
+            softmaxResult.put(naivebayesResult.getKey(), (float) (Math.pow(this.e, naivebayesResult.getValue()) / denominator));
+        }
+
+        softmaxResult = sortMap(softmaxResult);
+        return softmaxResult;
+    }
+
+    private void calaDenominator(Map<String, Float> naivebayesResultMap) {
+        if (denominator == 0) {
+            for (Map.Entry<String, Float> naivebayesResult : naivebayesResultMap.entrySet()) {
+                //计算softmax算法分母
+                denominator += Math.pow(this.e, naivebayesResult.getValue());
+            }
+        }
+    }
+
+    public Map<String, Float> sortMap(Map<String, Float> ResultMap) {
+        ArrayList<Map.Entry<String, Float>> softmaxResultList = new ArrayList<>(ResultMap.entrySet());
+        softmaxResultList.sort(new Comparator<Map.Entry<String, Float>>() {
+            @Override
+            public int compare(Map.Entry<String, Float> o1, Map.Entry<String, Float> o2) {
+                return o2.getValue().compareTo(o1.getValue());
+            }
+        });
+        ResultMap = new LinkedHashMap<>();
+        for (Map.Entry<String, Float> softmaxResultMap : softmaxResultList) {
+            ResultMap.put(softmaxResultMap.getKey(), softmaxResultMap.getValue());
+        }
+        return ResultMap;
+    }
+}

+ 34 - 0
common-push/src/main/java/org/diagbot/common/push/naivebayes/factory/AlgorithmNaiveBayesFactory.java

@@ -0,0 +1,34 @@
+package org.diagbot.common.push.naivebayes.factory;
+
+import org.algorithm.core.AlgorithmExecutor;
+import org.algorithm.core.cnn.model.RelationExtractionEnsembleModel;
+import org.diagbot.common.push.naivebayes.core.AlgorithmNaiveBayesExecutor;
+
+/**
+ * @Description:
+ * @Author: HUJING
+ * @Date: 2019/9/10 15:25
+ */
+public class AlgorithmNaiveBayesFactory {
+    private static AlgorithmNaiveBayesExecutor algorithmNaiveBayesExecutorInstance = null;
+
+    public static AlgorithmExecutor getInstance() {
+        try {
+            algorithmNaiveBayesExecutorInstance = (AlgorithmNaiveBayesExecutor) create(algorithmNaiveBayesExecutorInstance, AlgorithmNaiveBayesExecutor.class);
+        } catch (InstantiationException inst) {
+            inst.printStackTrace();
+        } catch (IllegalAccessException ille) {
+            ille.printStackTrace();
+        }
+        return algorithmNaiveBayesExecutorInstance;
+    }
+
+    private static Object create(Object obj, Class cls) throws InstantiationException, IllegalAccessException {
+        if (obj == null) {
+            synchronized (cls) {
+                obj = cls.newInstance();
+            }
+        }
+        return obj;
+    }
+}

+ 101 - 0
common-push/src/main/java/org/diagbot/common/push/util/CryptUtil.java

@@ -0,0 +1,101 @@
+package org.diagbot.common.push.util;
+
+import java.util.List;
+
+/**
+ * @Description: 加解密工具类
+ * @author: gaodm
+ * @time: 2019/12/30 11:09
+ */
+public class CryptUtil {
+
+    private final static char EN_MAX = '\u0080';//128
+    private final static int MOVE_NUM = 2;
+    private final static char DE_MAX = EN_MAX + MOVE_NUM;
+
+    /**
+     * 加密,把一个字符串在原有的基础上+2
+     *
+     * @param data 需要解密的原字符串
+     * @return 返回解密后的新字符串
+     */
+    public static String encrypt_char(String data) {
+        char[] chars = data.toCharArray();
+        for (int i = 0; i < chars.length; i++) {
+            if (EN_MAX < chars[i]) {
+                chars[i] += MOVE_NUM;
+            }
+        }
+        return new String(chars);
+    }
+
+    /**
+     * 解密:把一个加密后的字符串在原有基础上-2
+     *
+     * @param data 加密后的字符串
+     * @return 返回解密后的新字符串
+     */
+    public static String decrypt_char(String data) {
+        char[] chars = data.toCharArray();
+        for (int i = 0; i < chars.length; i++) {
+            if (DE_MAX < chars[i]) {
+                chars[i] -= MOVE_NUM;
+            }
+        }
+        return new String(chars);
+    }
+
+
+    /**
+     * 对List<String>进行加密
+     *
+     * @param list 加密前的list
+     * @return 加密后的list
+     */
+    public static void encryptList(List<String> list) {
+        if (ListUtil.isNotEmpty(list)) {
+            for (int i = 0; i < list.size(); i++) {
+                list.set(i, CryptUtil.encrypt_char(list.get(i)));
+            }
+        }
+    }
+
+    /**
+     * 对List<String>进行解密
+     * @param list 解密前的list
+     * @return 解密后的list
+     */
+    public static void decryptList(List<String> list) {
+        if (ListUtil.isNotEmpty(list)) {
+            for (int i = 0; i < list.size(); i++) {
+                list.set(i, CryptUtil.decrypt_char(list.get(i)));
+            }
+        }
+    }
+
+
+
+    public static void main(String[] args) {
+        //加密英文
+        String data = "解密后:�dsfaa2132159-4331}~\u007F";
+        String charResult = encrypt_char(data);
+        System.out.println("加密后:" + charResult);
+        //解密
+        String charStr = decrypt_char(charResult);
+        System.out.println("解密后:" + charStr);
+
+
+        //加密中文
+        data = "跳梁小豆tlxd666,z";
+        String result = encrypt_char(data);
+        System.out.println("加密后:" + result);
+        String str1 = decrypt_char(result);
+        System.out.println("解密后:" + str1);
+
+//        int num = 32;
+//        while (num <= 128) {
+//            System.out.println((char) num + "  (Unicode编码对应的数字为:) " + num);
+//            num++;
+//        }
+    }
+}

+ 88 - 0
common-push/src/main/java/org/diagbot/common/push/util/ListUtil.java

@@ -0,0 +1,88 @@
+package org.diagbot.common.push.util;
+
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
+
+/**
+ * @Description: List 工具类
+ * @author: gaodm
+ * @date: 2017/12/28 15:36
+ * @version: V1.0
+ */
/**
 * @Description: List utility helpers (null-safe emptiness checks, creation,
 * first-element access, array-to-list conversion).
 * @author: gaodm
 * @date: 2017/12/28 15:36
 * @version: V1.0
 */
public class ListUtil {
    /**
     * Index of the first element of a list.
     */
    public static final int FIRST = 0;

    private ListUtil() {
        // utility class: no instances
    }

    /**
     * Creates a new empty mutable list.
     *
     * @param <E> element type
     * @return a new, empty {@link ArrayList}
     */
    public static <E> ArrayList<E> newArrayList() {
        return new ArrayList<>();
    }

    /**
     * Null-safe emptiness check.
     *
     * @param list list to test, may be null
     * @return true when the list is null or contains no elements
     */
    public static boolean isEmpty(List list) {
        // isEmpty() already covers size() < 1, so one check suffices; returning
        // the boolean directly avoids the previous Boolean.TRUE/FALSE unboxing
        return null == list || list.isEmpty();
    }

    /**
     * Null-safe non-emptiness check.
     *
     * @param list list to test, may be null
     * @return true when the list is non-null and has at least one element
     */
    public static boolean isNotEmpty(List list) {
        return !isEmpty(list);
    }

    /**
     * Returns the first element of the list, or null when the list is null or
     * empty.
     *
     * @param list source list
     * @param <E>  element type
     * @return the first element, or null
     */
    public static <E> E firstEntity(List<E> list) {
        if (isEmpty(list)) {
            return null;
        }
        return list.get(FIRST);
    }

    /**
     * Copies an array into a new mutable list.
     *
     * @param strArray source array (must be non-null)
     * @param <E>      element type
     * @return a new {@link ArrayList} with the array's elements in order
     */
    public static <E> ArrayList<E> arrayToList(E[] strArray) {
        ArrayList<E> arrayList = new ArrayList<>(strArray.length);
        Collections.addAll(arrayList, strArray);
        return arrayList;
    }

    public static void main(String[] args) throws Exception {
        String[] i = {"A", "B"};
        List<String> o = arrayToList(i);
        // Arrays.toString prints the contents; printing the array directly
        // would only show its identity hash ([Ljava.lang.String;@...)
        System.out.println("输入参数:" + Arrays.toString(i));
        System.out.println("输出参数:" + o);
    }
}

+ 33 - 2
bigdata-web/src/main/java/org/diagbot/bigdata/util/BigDataConstants.java

@@ -1,4 +1,7 @@
-package org.diagbot.bigdata.util;
+package org.diagbot.common.push.util;
+
+import java.util.HashMap;
+import java.util.Map;
 
 /**
  * @ClassName org.diagbot.bigdata.util.BigDataConstants
@@ -7,7 +10,7 @@ package org.diagbot.bigdata.util;
  * @Date 2019/1/16/016 14:06
  * @Version 1.0
  **/
-public class BigDataConstants {
+public class PushConstants {
     public final static String resource_type_i = "I";       //住院
     public final static String resource_type_o = "O";       //门诊
     public final static String resource_type_e = "E";       //急诊
@@ -35,7 +38,35 @@ public class BigDataConstants {
     public final static String standard_info_relation_type_2 = "2";    //同义词
     public final static String standard_info_relation_type_3 = "3";    //大小类
 
+    //规则类型1:危急值提醒  2:开单合理性  3:管理评估  4:不良反应  5:药物推荐  6:病情提示
+    public final static String rule_app_type_id_1   = "1";
+    public final static String rule_app_type_id_2   = "2";
+    public final static String rule_app_type_id_3   = "3";
+    public final static String rule_app_type_id_4   = "4";
+    public final static String rule_app_type_id_5   = "5";
+    public final static String rule_app_type_id_6   = "6";
+
+
     public final static String result_mapping_vital = "resultMappingVitalMap";          //推送体征结果名称映射
     public final static String result_mapping_diag = "resultMappingDiagMap";          //推送疾病科室名称映射
     public final static String result_mapping_filter = "resultMappingFilterMap";          //推送结果年龄 性别过滤
+
+    //关系抽取property_id对应property_name
+    public final static Map<String,String> featureTypeMap = new HashMap<String,String>(){{
+        put("80","辅检其他");
+        put("9","单位");
+        put("2","时间");
+        put("3","部位");
+        put("7","反意或虚拟");
+        put("16","辅检项目");
+        put("17","辅检结果");
+        put("81","属性");
+        put("82","方位");
+        put("83","形容词");
+        put("84","局部结构");
+        put("85","属性值");
+        put("86","表现");
+        put("28","字母与数值");
+        put("87","正常表现");
+    }};
 }

+ 288 - 0
common-push/src/main/java/org/diagbot/common/push/work/ParamsDataProxy.java

@@ -0,0 +1,288 @@
+package org.diagbot.common.push.work;
+
+import org.apache.commons.lang3.StringUtils;
+import org.diagbot.common.push.bean.SearchData;
+import org.diagbot.common.push.util.PushConstants;
+import org.diagbot.nlp.feature.FeatureAnalyze;
+import org.diagbot.nlp.feature.FeatureType;
+import org.diagbot.nlp.rule.module.PreResult;
+import org.diagbot.nlp.util.Constants;
+import org.diagbot.pub.utils.PropertiesUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.*;
+
+/**
+ * @ClassName org.diagbot.bigdata.work.ParamsDataProxy
+ * @Description TODO
+ * @Author fyeman
+ * @Date 2019/1/16/016 14:04
+ * @Version 1.0
+ **/
+public class ParamsDataProxy {
+    Logger logger = LoggerFactory.getLogger(ParamsDataProxy.class);
+    private String featureNum = "";//特征提取范围
+
+    public void createNormalInfo(SearchData searchData) throws Exception {
+        //计算年龄区间
+        if (searchData.getAge() > 0) {
+            searchData.setAge_start(searchData.getAge() - 5);
+            searchData.setAge_end(searchData.getAge() + 5);
+
+            searchData.setNormal("年龄" + searchData.getAge() + "岁");
+        }
+        //修改性别代码
+        if (!StringUtils.isEmpty(searchData.getSex())) {
+            if ("M".equals(searchData.getSex())) {
+                searchData.setSex("1");
+                searchData.setNormal(searchData.getNormal() + "性别男性");
+            } else if ("F".equals(searchData.getSex())) {
+                searchData.setSex("2");
+                searchData.setNormal(searchData.getNormal() + "性别女性");
+            } else {
+                searchData.setSex("3");
+                searchData.setNormal(searchData.getNormal() + "性别其他");
+            }
+        } else {
+            searchData.setSex("3");
+        }
+        //婚姻情况
+        if (!StringUtils.isEmpty(searchData.getMarriage())) {
+            if ("1".equals(searchData.getMarriage())) {
+                searchData.setNormal(searchData.getNormal() + "婚姻己婚");
+            }
+            if ("2".equals(searchData.getMarriage())) {
+                searchData.setNormal(searchData.getNormal() + "婚姻未婚");
+            }
+        }
+    }
+
+    public void createSearchData(SearchData searchData) throws Exception {
+        //消除空格
+        if (searchData.getSymptom() != null) {
+            searchData.setSymptom(searchData.getSymptom().trim());
+        }
+        //默认查询门诊数据
+        if (StringUtils.isEmpty(searchData.getResourceType())) {
+            searchData.setResourceType(PushConstants.resource_type_o);
+        }
+        if (StringUtils.isNotEmpty(searchData.getSymptom())) {
+            searchData.setSymptom(searchData.getSymptom().trim());
+        }
+        //一次推送多个类别信息
+        String[] featureTypes = searchData.getFeatureType().split(",");
+        //featureType统一转换
+        String[] convertFeatureTypes = new String[featureTypes.length];
+        for (int i = 0; i < featureTypes.length; i++) {
+            convertFeatureTypes[i] = convertFeatureType(searchData.getSysCode(), featureTypes[i]);
+        }
+        searchData.setFeatureType(StringUtils.join(convertFeatureTypes, ","));
+        searchData.setFeatureTypes(convertFeatureTypes);
+
+        //获取入参中的特征信息
+        FeatureAnalyze fa = new FeatureAnalyze();
+        List<Map<String, Object>> featuresList = new ArrayList<>();
+        //获取配置文件中的特征范围
+        PropertiesUtil propertiesUtil = new PropertiesUtil("nlp.properties");
+        featureNum = propertiesUtil.getProperty("push.feature.num");
+        fa.setFeatureNum(featureNum);
+        if (!StringUtils.isEmpty(searchData.getChief())) {
+            //提取主诉
+            featuresList = fa.start(searchData.getChief(), FeatureType.FEATURE);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getSymptom())) {
+            //提取现病史
+            featuresList = fa.start(searchData.getSymptom(), FeatureType.FEATURE);
+            paramFeatureInit(searchData, featuresList);
+            //提取时间信息
+            featuresList = fa.start(searchData.getSymptom(), FeatureType.TIME);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getVital())) {
+            //提取体征
+            featuresList = fa.start(searchData.getVital(), FeatureType.FEATURE);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getPasts())) {
+            //提取既往史
+            featuresList = fa.start(searchData.getPasts(), FeatureType.FEATURE);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getOther()) || !StringUtils.isEmpty(searchData.getIndications())) {
+            //提取其他史等
+            featuresList = fa.start((searchData.getOther() == null ? "" : searchData.getOther()) + (searchData.getIndications() == null ? "" : searchData.getIndications()), FeatureType.FEATURE);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getPacsString())) {
+            featuresList = fa.start(searchData.getPacsString(), FeatureType.PACS);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getLisString())) {
+            featuresList = fa.start(searchData.getLisString(), FeatureType.LIS);
+            paramFeatureInit(searchData, featuresList);
+        }
+        // 清洗特征词,去除词性不匹配的词
+        searchData = cleanFeature(featuresList, fa, searchData);
+        if (!StringUtils.isEmpty(searchData.getPasts())) {
+            //如果既往史中诊断信息,需要提取这个特征
+            featuresList = fa.start(searchData.getPasts(), FeatureType.DIAG);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (searchData.getDiagOrder() != null && searchData.getDiagOrder().size() > 0 && StringUtils.isEmpty(searchData.getSymptom())) {
+            String d = "";
+            for (PreResult preResult : searchData.getDiagOrder()) {
+                d = preResult.getUniqueName() + "," + d;
+            }
+            featuresList = fa.start(d, FeatureType.DIAG);
+            paramFeatureInit(searchData, featuresList);
+        }
+//        if (!StringUtils.isEmpty(searchData.getPacs())) {
+//            //关系抽取模型
+//            AlgorithmCNNExecutorPacs algorithmCNNExecutor = RelationExtractionFactory.getInstance();
+//            RelationExtractionUtil re = new RelationExtractionUtil();
+//            //Pacs原始分词结果
+//            List<List<String>> execute = algorithmCNNExecutor.execute(searchData.getPacs(), re.createTriad(searchData));
+//            if (execute != null && execute.size() > 0) {
+//                re.addToSearchDataInputs(execute, searchData);
+//            }
+//        }
+        //模型需要病历文本信息传入
+        Map<String, String> map = new HashMap<>();
+        if (!StringUtils.isEmpty(searchData.getChief()) && !StringUtils.isEmpty(searchData.getSymptom())) {
+            map.put("sentence", searchData.getChief() + searchData.getSymptom());
+        } else if (!StringUtils.isEmpty(searchData.getChief())) {
+            map.put("sentence", searchData.getChief());
+        } else if (!StringUtils.isEmpty(searchData.getSymptom())) {
+            map.put("sentence", searchData.getSymptom());
+        }
+        if (map.get("sentence") != null) {
+            searchData.getInputs().put("sentence", map);
+        }
+    }
+
+    /**
+     * 外部系统featureType需要转化为大数据定义的featureType
+     *
+     * @param sysCode
+     * @param featureType
+     * @return
+     */
+    private String convertFeatureType(String sysCode, String featureType) {
+        if (StringUtils.isEmpty(sysCode) || sysCode.equals("1")) {
+            if ("1".equals(featureType)) {
+                return PushConstants.feature_type_symptom;
+            }
+            if ("7".equals(featureType)) {
+                return PushConstants.feature_type_diag;
+            }
+            if ("4".equals(featureType)) {
+                return PushConstants.feature_type_vital;
+            }
+            if ("5".equals(featureType)) {
+                return PushConstants.feature_type_lis;
+            }
+            if ("6".equals(featureType)) {
+                return PushConstants.feature_type_pacs;
+            }
+            if ("3".equals(featureType)) {
+                return PushConstants.feature_type_history;
+            }
+            if ("8".equals(featureType)) {
+                return PushConstants.feature_type_treat;
+            }
+            if ("22".equals(featureType)) {
+                return PushConstants.feature_type_labelpush;
+            }
+            if ("11".equals(featureType)) {
+                return PushConstants.feature_type_manju;
+            }
+            if ("42".equals(featureType)) {
+                return PushConstants.feature_type_vital_index;
+            }
+            return null;
+        }
+        return featureType;
+    }
+
+    private SearchData cleanFeature(List<Map<String, Object>> featuresList, FeatureAnalyze fa,
+                                    SearchData searchData) {
+        // 在输入的辅检文本中,只提取辅检信息
+        String[] PACS_Feature = { Constants.word_property_PACS,
+                Constants.word_property_PACS_Detail, Constants.word_property_PACS_Result };
+        searchData = removeFeature(searchData.getLisString(), fa, searchData, PACS_Feature, FeatureType.PACS);
+
+        // 在输入的化验文本中,只提取化验信息
+        String[] LIS_Feature = { Constants.word_property_LIS,
+                Constants.word_property_LIS_Detail, Constants.word_property_LIS_Result };
+        searchData = removeFeature(searchData.getPacsString(), fa, searchData, LIS_Feature, FeatureType.LIS);
+
+        return searchData;
+    }
+
+    private SearchData removeFeature(String text, FeatureAnalyze fa,
+                                     SearchData searchData, String[] properties, FeatureType featureType) {
+        String name = "";
+        Boolean related = false;
+
+        try {
+            List<Map<String, Object>> featureList = fa.start(text, featureType);
+            if (featureList != null) {
+                for (Map<String, Object> item : featureList) {
+                    name = item.get("feature_name").toString();
+                    String[] property = item.get("property").toString().split(",");
+                    for (String prop : property) {
+                        if (Arrays.asList(properties).contains(prop)) {
+                            //                            related = true;
+                            searchData.getInputs().remove(name);
+                            break;
+                        }
+                    }
+                }
+            }
+
+        } catch (Exception ex) {
+            ex.printStackTrace();
+        } finally {
+            return searchData;
+        }
+    }
+
+    /**
+     * 推送模型入参
+     *
+     * @param searchData
+     * @throws Exception
+     */
+    private void paramFeatureInit(SearchData searchData, List<Map<String, Object>> featuresList) throws Exception {
+        if (featuresList != null && featuresList.size() > 0) {
+            Map<String, Object> featureMap = null;
+            for (int i = 0; i < featuresList.size(); i++) {
+                featureMap = featuresList.get(i);
+                Map<String, String> map = new HashMap<>();
+                for (Map.Entry<String, Object> entry : featureMap.entrySet()) {
+                    map.put(entry.getKey(), String.valueOf(entry.getValue()));
+                }
+                map.put("featureType", String.valueOf(featureMap.get("feature_type")));
+                map.put("featureName", String.valueOf(featureMap.get("feature_name")));
+                map.put("property", String.valueOf(featureMap.get("property")));
+                map.put("concept", String.valueOf(featureMap.get("concept")));
+                if (Constants.default_negative.equals(featureMap.get("negative"))) {
+                    if (map.get("featureType").equals(Constants.feature_type_time)) {
+//                        searchData.getInputs().put("时间", map);
+                    } else {
+                        if (searchData.getInputs().get(map.get("feature_name")) == null) {
+                            if (i < 8) {
+                                searchData.getInputs().put(map.get("feature_name"), map);
+                            }
+                            searchData.getGraphInputs().put(map.get("feature_name"), map);
+                        }
+                    }
+                } else {
+                    searchData.getFilters().put(map.get("feature_name"), map);
+                }
+            }
+        }
+    }
+}

+ 95 - 0
common-push/src/main/java/org/diagbot/common/push/work/RelationExtractionUtil.java

@@ -0,0 +1,95 @@
+package org.diagbot.common.push.work;
+
+import org.algorithm.core.cnn.entity.Lemma;
+import org.algorithm.core.cnn.entity.Triad;
+import org.diagbot.common.push.bean.SearchData;
+import org.diagbot.common.push.cache.ApplicationCacheUtil;
+import org.diagbot.common.push.util.PushConstants;
+import org.diagbot.nlp.participle.ParticipleUtil;
+import org.diagbot.nlp.participle.word.Lexeme;
+import org.diagbot.nlp.participle.word.LexemePath;
+import org.diagbot.nlp.util.Constants;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @Description:
+ * @Author: HUJING
+ * @Date: 2019/9/9 17:30
+ */
+public class RelationExtractionUtil {
+    public List<Triad> createTriad(SearchData searchData) throws IOException {
+        List<Triad> triads = new ArrayList<>();
+        String[] pacsSplits = searchData.getPacsString().trim().split("。|\n");
+        List<Lemma> lemmaList = new ArrayList<>();
+        Lemma lemma = null;
+        for (String pacsSplit : pacsSplits) {
+            LexemePath<Lexeme> pacsLexemes = ParticipleUtil.participlePacs(pacsSplit);
+            for (int i = 0; i < pacsLexemes.size(); i++) {
+                //跳过非医学词
+                if (Constants.word_property_other.equals(pacsLexemes.get(i).getProperty())) {
+                    continue;
+                }
+                lemma = new Lemma();
+                lemma.setText(pacsLexemes.get(i).getText());
+                lemma.setPosition(String.valueOf(pacsLexemes.get(i).getOffset()) + "," + (Integer.valueOf(pacsLexemes.get(i).getOffset() + pacsLexemes.get(i).getLength()) - 1));
+                lemma.setProperty(PushConstants.featureTypeMap.get(pacsLexemes.get(i).getProperty()));
+                lemmaList.add(lemma);
+            }
+        }
+        for (int i = 0; i < lemmaList.size() - 1; i++) {
+            for (int j = i + 1; j < lemmaList.size(); j++) {
+                Triad triad = new Triad();
+                triad.setL_1(lemmaList.get(i));
+                triad.setL_2(lemmaList.get(j));
+                triads.add(triad);
+            }
+        }
+        return triads;
+    }
+
+    public void addToSearchDataInputs(List<List<String>> relationExtractionContents, SearchData searchData) throws Exception {
+        StringBuffer sb = null;
+        for (List<String> contents : relationExtractionContents) {
+            sb = new StringBuffer();
+            for (String content : contents) {
+                sb.append(content);
+            }
+            if (isExist(sb.toString())) {
+                Map<String, String> map = new HashMap<>();
+                map.put("featureType", Constants.feature_type_pacs);
+                map.put("featureName", sb.toString());
+                map.put("property", Constants.word_property_PACS_Result);
+                map.put("concept", sb.toString());
+                //全是有
+                map.put("negative", Constants.default_negative);
+                if (searchData.getInputs().get(map.get("featureName")) == null) {
+                    searchData.getInputs().put(map.get("featureName"), map);
+                }
+                if (searchData.getGraphInputs().get(map.get("featureName")) == null) {
+                    searchData.getGraphInputs().put(map.get("featureName"), map);
+                }
+            }
+        }
+    }
+
+    /**
+     * 关系抽取输出的content是否在已有诊断依据中存在
+     * @param content
+     * @return
+     */
+    public boolean isExist(String content){
+        Map<String, Map<String, String>> kl_diagnose_detail_filter_map = ApplicationCacheUtil.getKl_diagnose_detail_filter_map();
+        if (kl_diagnose_detail_filter_map.get("4") != null){
+            if (kl_diagnose_detail_filter_map.get("4").containsKey(content)){
+                return true;
+            }
+        }
+        return false;
+    }
+
+}

+ 0 - 28
common-service/src/main/java/org/diagbot/common/javabean/Filnlly.java

@@ -1,28 +0,0 @@
-package org.diagbot.common.javabean;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-public class Filnlly {
-    private List<Indicators> adverseEvent;
-    private ArrayList<Drugs> treatment;
-
-
-    public List<Indicators> getAdverseEvent() {
-        return adverseEvent;
-    }
-
-    public void setAdverseEvent(List<Indicators> adverseEvent) {
-        this.adverseEvent = adverseEvent;
-    }
-
-    public ArrayList<Drugs> getTreatment() {
-        return treatment;
-    }
-
-    public void setTreatment(ArrayList<Drugs> treatment) {
-        this.treatment = treatment;
-    }
-
-}

+ 0 - 0
common-service/src/main/java/org/diagbot/common/javabean/Rule.java


Some files were not shown because too many files changed in this diff