Browse Source

Merge branch 'push-dev' of http://192.168.2.236:10080/louhr/push into push-dev

hujing 5 years ago
parent
commit
76a4e97ddf
100 changed files with 3948 additions and 2269 deletions
  1. 130 0
      algorithm/src/main/java/org/algorithm/core/FilterRule.java
  2. 282 0
      algorithm/src/main/java/org/algorithm/core/RelationTreeUtils.java
  3. 493 0
      algorithm/src/main/java/org/algorithm/core/RuleCheckMachine.java
  4. 3 4
      algorithm/src/main/java/org/algorithm/core/cnn/AlgorithmCNNExecutor.java
  5. 22 0
      algorithm/src/main/java/org/algorithm/core/cnn/AlgorithmCNNExecutorPacs.java
  6. 1 1
      algorithm/src/main/java/org/algorithm/core/cnn/dataset/RelationExtractionDataSet.java
  7. 32 1
      algorithm/src/main/java/org/algorithm/core/cnn/entity/Lemma.java
  8. 40 13
      algorithm/src/main/java/org/algorithm/core/cnn/model/RelationExtractionEnsembleModel.java
  9. 4 4
      algorithm/src/main/java/org/algorithm/core/cnn/model/RelationExtractionModel.java
  10. 2 1
      algorithm/src/main/java/org/algorithm/core/neural/DiagnosisPredictExecutor.java
  11. 30 7
      algorithm/src/main/java/org/algorithm/core/neural/TensorFlowModelLoadFactory.java
  12. 122 13
      algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSet.java
  13. 113 73
      algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSetImpl.java
  14. 11 1
      algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSetImplNonParallel.java
  15. 33 0
      algorithm/src/main/java/org/algorithm/factory/RelationExtractionFactory.java
  16. 5 3
      algorithm/src/main/java/org/algorithm/test/ReEnsembleModelTest.java
  17. 9 4
      algorithm/src/main/java/org/algorithm/test/TensorflowExcutorTest.java
  18. 47 40
      algorithm/src/main/java/org/algorithm/test/Test.java
  19. 46 0
      algorithm/src/main/java/org/algorithm/test/TestDiagnosisFilter.java
  20. 34 0
      algorithm/src/main/java/org/algorithm/test/TestReSplit.java
  21. 15 0
      algorithm/src/main/java/org/algorithm/test/TestRelationTreeUtils.java
  22. 140 0
      algorithm/src/main/java/org/algorithm/test/TestRuleCheckMachine.java
  23. 1 1
      algorithm/src/main/java/org/algorithm/util/MysqlConnector.java
  24. 1 1
      algorithm/src/main/resources/algorithm.properties
  25. 6 0
      bigdata-web/pom.xml
  26. 0 233
      bigdata-web/src/main/java/org/diagbot/bigdata/common/ApplicationCacheUtil.java
  27. 0 98
      bigdata-web/src/main/java/org/diagbot/bigdata/common/InitListener.java
  28. 3 3
      bigdata-web/src/main/java/org/diagbot/bigdata/controller/AlgorithmController.java
  29. 0 22
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingDiagMapper.java
  30. 0 22
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingFilterMapper.java
  31. 0 22
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingVitalMapper.java
  32. 0 38
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingDiag.java
  33. 0 38
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingVital.java
  34. 0 12
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingDiagWrapper.java
  35. 0 6
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingFilterWrapper.java
  36. 0 7
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingVitalWrapper.java
  37. 0 55
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingDiagMapper.xml
  38. 0 67
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingFilterMapper.xml
  39. 0 77
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingVitalMapper.xml
  40. 0 8
      bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingDiagService.java
  41. 0 8
      bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingFilterService.java
  42. 0 8
      bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingVitalService.java
  43. 0 21
      bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingDiagServiceImpl.java
  44. 0 21
      bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingFilterServiceImpl.java
  45. 0 21
      bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingVitalServiceImpl.java
  46. 6 9
      bigdata-web/src/main/java/org/diagbot/bigdata/work/AlgorithmCore.java
  47. 94 0
      bigdata-web/src/main/java/org/diagbot/bigdata/work/BigDataParamsProxy.java
  48. 1 1
      bigdata-web/src/main/java/org/diagbot/bigdata/work/BigDataSearchData.java
  49. 0 702
      bigdata-web/src/main/java/org/diagbot/bigdata/work/ParamsDataProxy.java
  50. 3 97
      bigdata-web/src/main/java/org/diagbot/bigdata/work/ResultDataProxy.java
  51. 37 41
      bigdata-web/src/test/java/org/diagbot/AddStandWordTest.java
  52. 135 0
      bigdata-web/src/test/java/org/diagbot/EyeHospitalData.java
  53. 8 2
      common-push/pom.xml
  54. 0 11
      common-push/src/main/java/org/diagbot/common/push/Test.java
  55. 34 0
      common-push/src/main/java/org/diagbot/common/push/bean/CrisisDetail.java
  56. 1 1
      common-service/src/main/java/org/diagbot/common/work/FeatureRate.java
  57. 1 1
      common-service/src/main/java/org/diagbot/common/work/LisDetail.java
  58. 79 0
      common-push/src/main/java/org/diagbot/common/push/bean/PreResult.java
  59. 14 4
      common-service/src/main/java/org/diagbot/common/work/ResponseData.java
  60. 1 1
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingFilter.java
  61. 143 0
      common-push/src/main/java/org/diagbot/common/push/bean/Rule.java
  62. 45 0
      common-push/src/main/java/org/diagbot/common/push/bean/RuleApp.java
  63. 62 4
      common-service/src/main/java/org/diagbot/common/work/SearchData.java
  64. 1 1
      common-service/src/main/java/org/diagbot/common/javabean/Detail.java
  65. 1 1
      common-service/src/main/java/org/diagbot/common/javabean/Drugs.java
  66. 1 2
      common-service/src/main/java/org/diagbot/common/javabean/Filnlly.java
  67. 1 1
      common-service/src/main/java/org/diagbot/common/javabean/FuzhenFilnlly.java
  68. 1 1
      common-service/src/main/java/org/diagbot/common/javabean/Indicators.java
  69. 1 1
      common-service/src/main/java/org/diagbot/common/javabean/MangementEvaluation.java
  70. 12 1
      common-service/src/main/java/org/diagbot/common/javabean/MedicalIndication.java
  71. 2 2
      common-service/src/main/java/org/diagbot/common/javabean/MedicalIndicationDetail.java
  72. 1 1
      common-service/src/main/java/org/diagbot/common/javabean/Medicition.java
  73. 190 0
      common-push/src/main/java/org/diagbot/common/push/cache/ApplicationCacheUtil.java
  74. 91 9
      common-push/src/main/java/org/diagbot/common/push/cache/CacheFileManager.java
  75. 1 1
      graph/src/main/java/org/diagbot/graph/util/CacheUtil.java
  76. 96 7
      common-push/src/main/java/org/diagbot/common/push/filter/ClassifyDiag.java
  77. 85 0
      common-push/src/main/java/org/diagbot/common/push/filter/pretreat/Pretreatment.java
  78. 39 0
      common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentDiag.java
  79. 93 0
      common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentLis.java
  80. 43 0
      common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentMakeList.java
  81. 46 0
      common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentNormal.java
  82. 45 0
      common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentOther.java
  83. 128 0
      common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentPacs.java
  84. 44 0
      common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentSymptom.java
  85. 57 0
      common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentVital.java
  86. 189 0
      common-push/src/main/java/org/diagbot/common/push/filter/rule/PretreatmentRule.java
  87. 24 2
      bigdata-web/src/main/java/org/diagbot/bigdata/util/BigDataConstants.java
  88. 258 0
      common-push/src/main/java/org/diagbot/common/push/work/ParamsDataProxy.java
  89. 92 0
      common-push/src/main/java/org/diagbot/common/push/work/RelationExtractionUtil.java
  90. 5 0
      graph-web/pom.xml
  91. 2 10
      graph-web/src/main/java/org/diagbot/graphWeb/controller/GraphController.java
  92. 4 8
      graph-web/src/main/java/org/diagbot/graphWeb/work/DiseaseCalculate.java
  93. 3 4
      graph-web/src/main/java/org/diagbot/graphWeb/work/FilterSortDiag.java
  94. 91 293
      graph-web/src/main/java/org/diagbot/graphWeb/work/GraphCalculate.java
  95. 1 5
      graph-web/src/main/java/org/diagbot/graphWeb/work/HighRiskCalculate.java
  96. 3 3
      graph-web/src/main/java/org/diagbot/graphWeb/work/LisPacsCalculate.java
  97. 0 84
      graph-web/src/main/java/org/diagbot/graphWeb/work/ParamsDataProxy.java
  98. 3 3
      graph-web/src/main/resources/url.properties
  99. 5 1
      graph/pom.xml
  100. 0 0
      graph/src/main/java/org/diagbot/graph/jdbc/Neo4jAPI.java

+ 130 - 0
algorithm/src/main/java/org/algorithm/core/FilterRule.java

@@ -0,0 +1,130 @@
+package org.algorithm.core;
+
+import java.util.Map;
+
/**
 * One relation-filter rule, as loaded from the rule table.
 *
 * A rule names a pair of entities (key_1/type_1 and key_2/type_2), an
 * "inside" pattern expected between them, and exception words (despite /
 * despiteInside) that disable the rule. Each key/inside value is paired
 * with a type discriminator telling how to interpret it (e.g. literal word
 * vs. entity type). The uuid identifies the rule inside the engine's
 * lookup indexes and is assigned by the loader, not by this class.
 */
public class FilterRule {

    // Identifier assigned sequentially by the rule loader.
    private Integer uuid;

    // First entity of the pair and how to interpret it.
    private String key_1;
    private String type_1;

    // Second entity of the pair and how to interpret it.
    private String key_2;
    private String type_2;

    // Pattern expected between the two entities.
    private String inside;
    private String insideType;

    // Exception words: when matched, the rule does not apply.
    private String despite;
    private String despiteInside;

    /**
     * Builds a rule from a column-name to value map; missing keys simply
     * leave the corresponding field null. The uuid is not read from the map
     * and must be set separately via {@link #setUuid(Integer)}.
     *
     * @param attributes map with keys key_1, type_1, key_2, type_2, inside,
     *                   inside_type, despite and despite_inside
     */
    public FilterRule(Map<String, String> attributes) {
        key_1 = attributes.get("key_1");
        type_1 = attributes.get("type_1");
        key_2 = attributes.get("key_2");
        type_2 = attributes.get("type_2");
        inside = attributes.get("inside");
        insideType = attributes.get("inside_type");
        despite = attributes.get("despite");
        despiteInside = attributes.get("despite_inside");
    }

    public Integer getUuid() {
        return this.uuid;
    }

    public void setUuid(Integer uuid) {
        this.uuid = uuid;
    }

    public String getKey_1() {
        return this.key_1;
    }

    public void setKey_1(String key_1) {
        this.key_1 = key_1;
    }

    public String getType_1() {
        return this.type_1;
    }

    public void setType_1(String type_1) {
        this.type_1 = type_1;
    }

    public String getKey_2() {
        return this.key_2;
    }

    public void setKey_2(String key_2) {
        this.key_2 = key_2;
    }

    public String getType_2() {
        return this.type_2;
    }

    public void setType_2(String type_2) {
        this.type_2 = type_2;
    }

    public String getInside() {
        return this.inside;
    }

    public void setInside(String inside) {
        this.inside = inside;
    }

    public String getInsideType() {
        return this.insideType;
    }

    public void setInsideType(String insideType) {
        this.insideType = insideType;
    }

    public String getDespite() {
        return this.despite;
    }

    public void setDespite(String despite) {
        this.despite = despite;
    }

    public String getDespiteInside() {
        return this.despiteInside;
    }

    public void setDespiteInside(String despiteInside) {
        this.despiteInside = despiteInside;
    }

    /** Debug representation listing every field. */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("FilterRule{");
        sb.append("uuid=").append(uuid)
                .append(", key_1='").append(key_1).append('\'')
                .append(", type_1='").append(type_1).append('\'')
                .append(", key_2='").append(key_2).append('\'')
                .append(", type_2='").append(type_2).append('\'')
                .append(", inside='").append(inside).append('\'')
                .append(", insideType='").append(insideType).append('\'')
                .append(", despite='").append(despite).append('\'')
                .append(", despiteInside='").append(despiteInside).append('\'')
                .append('}');
        return sb.toString();
    }
}

+ 282 - 0
algorithm/src/main/java/org/algorithm/core/RelationTreeUtils.java

@@ -0,0 +1,282 @@
+package org.algorithm.core;
+
+import org.algorithm.core.cnn.entity.Lemma;
+import org.algorithm.core.cnn.entity.Triad;
+
+import java.util.*;
+
+/**
+ * 关系树工具类
+ *
+ * @Author: bijl
+ * @Date: 2019/9/5 15:16
+ * @Description:
+ */
+public class RelationTreeUtils {
+
+    /**
+     * 同名实体(这里也叫词项)归并
+     * 规则:
+     * 1- 直接替代为位置最前面的一个
+     *
+     * @param triads 实体对列表
+     */
+    public static void sameTextLemmaMerge(List<Triad> triads) {
+
+        Map<String, Lemma> lemmaMap = new HashMap<>();
+        for (Triad triad : triads) {
+            Lemma l1 = triad.getL_1();
+            Lemma l2 = triad.getL_2();
+
+            if (lemmaMap.get(l1.getText()) == null)
+                lemmaMap.put(l1.getText(), l1);
+            else {
+                Lemma l1Pre = lemmaMap.get(l1.getText());
+                if (l1Pre.getStartPosition() > l1.getStartPosition())
+                    triad.setL_1(l1);  // 取靠前的
+            }
+
+            if (lemmaMap.get(l2.getText()) == null)
+                lemmaMap.put(l2.getText(), l2);
+            else {
+                Lemma l2Pre = lemmaMap.get(l2.getText());
+                if (l2Pre.getStartPosition() > l2.getStartPosition())
+                    triad.setL_2(l2);  // 取靠前的
+            }
+        }
+        for (Triad triad : triads) {
+            Lemma l1 = triad.getL_1();
+            Lemma l2 = triad.getL_2();
+            triad.setL_1(lemmaMap.get(l1.getText()));  // 用前面的同名实体(这里也叫词项)替代后面的
+            triad.setL_2(lemmaMap.get(l2.getText()));  // 用前面的同名实体(这里也叫词项)替代后面的
+        }
+    }
+
+    /**
+     * 构建关系树
+     * 基本规则:
+     * 1- 两个有关系的实体,前面的为父节点,后面的为子节点
+     *
+     * @param triads 有关系的三元组列表
+     */
+    public static void buildRelationTree(List<Triad> triads) {
+        for (Triad triad : triads) {
+            Lemma l1 = triad.getL_1();
+            Lemma l2 = triad.getL_2();
+            if (l1.getStartPosition() < l2.getStartPosition()) {  // 在前者为父节点
+                l1.setHaveChildren(true);
+                l2.setParent(l1);
+            } else {
+                l2.setHaveChildren(true);
+                l1.setParent(l2);
+            }
+        }
+    }
+
+    /**
+     * 获取关系树的分枝
+     *
+     * @param triads 有关系,并且设置了父子节点关系的三元组
+     */
+    public static List<List<String>> getRelationTreeBranches(List<Triad> triads) {
+        Map<Lemma, Integer> leafNodeLemmas = new HashMap<>();
+
+        for (Triad triad : triads) {
+            if (!triad.getL_1().isHaveChildren())
+                leafNodeLemmas.putIfAbsent(triad.getL_1(), 1);
+
+            if (!triad.getL_2().isHaveChildren())
+                leafNodeLemmas.putIfAbsent(triad.getL_2(), 1);
+        }
+
+        List<List<String>> branches = new ArrayList<>();
+        for (Lemma lemma : leafNodeLemmas.keySet()) {
+            List<Lemma> aBranch = new ArrayList<>();
+            while (lemma != null) {
+                aBranch.add(lemma);
+                lemma = lemma.getParent();
+            }
+            aBranch.sort(Comparator.naturalOrder());  // 按位置排序
+            branches.addAll(handleBranch(aBranch));
+        }
+
+
+        return branches;
+    }
+
+    /**
+     * 处理分枝,要求组合非阴性词,阴性词必须包含
+     * 操作:
+     * 1- 分离阴性词和非阴性词
+     * 2- 组合非阴性词
+     * 3- 添加阴性词到组合结果中
+     *
+     * @param aBranch
+     * @return
+     */
+    private static List<List<String>> handleBranch(List<Lemma> aBranch) {
+        List<Lemma> nonNegativeLemmas = new ArrayList<>();
+        List<Lemma> negativeLemmas = new ArrayList<>();
+        for (Lemma lemma : aBranch) {
+            if ("反意或虚拟".equals(lemma.getProperty()))
+                negativeLemmas.add(lemma);
+            else
+                nonNegativeLemmas.add(lemma);
+        }
+        List<List<Lemma>> nonNegativeLemmaCombinations = new ArrayList<>();
+        if (nonNegativeLemmas.size() > 0) {
+            for (int i = 1; i <= nonNegativeLemmas.size(); i++) {
+                combinerSelect(nonNegativeLemmas, new ArrayList<>(), nonNegativeLemmaCombinations,
+                        nonNegativeLemmas.size(), i);
+            }
+        }
+        List<List<String>> result = new ArrayList<>();
+        for (List<Lemma> lemmaCombination : nonNegativeLemmaCombinations) {
+            List<String> lemmaNames = new ArrayList<>();
+            lemmaCombination.addAll(negativeLemmas);  // 阴性词加入到组合中
+            lemmaCombination.sort(Comparator.naturalOrder());  // 按位置排序
+            for (Lemma lemma : lemmaCombination)  // 取出名称
+                lemmaNames.add(lemma.getText());
+            if (lemmaNames.size() >= 2)
+                result.add(lemmaNames);
+        }
+
+        return result;
+
+    }
+
+    /**
+     * 从三元组列表到关系树分枝
+     *
+     * @param triads
+     * @return
+     */
+    public static List<List<String>> triadsToRelationTreeBranches(List<Triad> triads) {
+//        sameTextLemmaMerge(triads);
+        buildRelationTree(triads);
+        return getRelationTreeBranches(triads);
+    }
+
+    /**
+     * 组合生成器
+     *
+     * @param data      原始数据
+     * @param workSpace 自定义一个临时空间,用来存储每次符合条件的值
+     * @param k         C(n,k)中的k
+     */
+    private static <E> void combinerSelect(List<E> data, List<E> workSpace, List<List<E>> result, int n, int k) {
+        List<E> copyData;
+        List<E> copyWorkSpace = null;
+
+        if (workSpace.size() == k) {
+//            for (E c : workSpace)
+//                System.out.print(c);
+
+            result.add(new ArrayList<>(workSpace));
+//            System.out.println();
+        }
+
+        for (int i = 0; i < data.size(); i++) {
+            copyData = new ArrayList<E>(data);
+            copyWorkSpace = new ArrayList<E>(workSpace);
+
+            copyWorkSpace.add(copyData.get(i));
+            for (int j = i; j >= 0; j--)
+                copyData.remove(j);
+            combinerSelect(copyData, copyWorkSpace, result, n, k);
+        }
+    }
+
+    /**
+     * 全排列算法
+     *
+     * @param stringList 字符串列表
+     * @return
+     */
+    public static ArrayList<ArrayList<String>> permute(List<String> stringList) {
+        ArrayList<ArrayList<String>> result = new ArrayList<ArrayList<String>>();
+        result.add(new ArrayList<String>());
+
+        for (int i = 0; i < stringList.size(); i++) {
+            //list of list in current iteration of the stringList num
+            ArrayList<ArrayList<String>> current = new ArrayList<ArrayList<String>>();
+
+            for (ArrayList<String> l : result) {
+                // # of locations to insert is largest index + 1
+                for (int j = 0; j < l.size() + 1; j++) {
+                    // + add num[i] to different locations
+                    l.add(j, stringList.get(i));
+
+                    ArrayList<String> temp = new ArrayList<String>(l);
+                    current.add(temp);
+
+                    // - remove num[i] add
+                    l.remove(j);
+                }
+            }
+
+            result = new ArrayList<>(current);
+        }
+
+        return result;
+    }
+
+
+    /**
+     * 测试文件
+     */
+    public static void test() {
+
+        List<Triad> triads = new ArrayList<>();
+        String[] arr_1 = {"子宫", "0,1", "部位"};
+        String[] arr_2 = {"内膜", "2,3", "结构"};
+        addTriad(arr_1, arr_2, triads);
+
+        String[] arr_1_1 = {"不", "13,13", "反意或虚拟"};
+        String[] arr_2_1 = {"出血", "10,11", "形容词"};
+        addTriad(arr_1_1, arr_2_1, triads);
+
+        String[] arr_1_2 = {"胸部", "15,16", "部位"};
+        String[] arr_2_2 = {"剧烈", "17,18", "程度"};
+        addTriad(arr_1_2, arr_2_2, triads);
+
+        String[] arr_1_3 = {"疼痛", "17,18", "形容词"};
+        String[] arr_2_3 = {"剧烈", "19,20", "程度"};
+        addTriad(arr_1_3, arr_2_3, triads);
+
+        String[] arr_1_4 = {"内膜", "2,3", "结构"};
+        String[] arr_2_4 = {"出血", "10,11", "形容词"};
+        addTriad(arr_1_4, arr_2_4, triads);
+
+        System.out.println(triads.size());
+        sameTextLemmaMerge(triads);
+        buildRelationTree(triads);
+        List<List<String>> info = getRelationTreeBranches(triads);
+
+        System.out.println(info);
+    }
+
+    /**
+     * 增加三元组
+     */
+    private static void addTriad(String[] lemma_1, String[] lemma_2, List<Triad> triads) {
+        Lemma lemma1 = new Lemma();
+        lemma1.setText(lemma_1[0]);
+        lemma1.setPosition(lemma_1[1]);
+        lemma1.setProperty(lemma_1[2]);
+
+        Lemma lemma2 = new Lemma();
+        lemma2.setText(lemma_2[0]);
+        lemma2.setPosition(lemma_2[1]);
+        lemma2.setProperty(lemma_2[2]);
+
+        Triad triad = new Triad();
+        triad.setL_1(lemma1);
+        triad.setL_2(lemma2);
+
+        triads.add(triad);
+
+    }
+
+
+}

+ 493 - 0
algorithm/src/main/java/org/algorithm/core/RuleCheckMachine.java

@@ -0,0 +1,493 @@
+package org.algorithm.core;
+
+import org.algorithm.core.cnn.entity.Lemma;
+import org.algorithm.core.cnn.entity.Triad;
+import org.algorithm.util.MysqlConnector;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.*;
+
+/**
+ * 规则检查机器
+ *
+ * @Author: bijl
+ * @Date: 2019/9/6 10:32
+ * @Description:
+ */
+public class RuleCheckMachine {
+    private final List<FilterRule> filterRules = new ArrayList<>();
+    private Map<String, Map<String, Set<Integer>>> key_1_map = null;
+    private Map<String, Map<String, Set<Integer>>> key_2_map = null;
+    private Map<String, String> punctuations = new HashMap<>();
+    private Map<String, Set<Integer>> despiteMap = null;  // 实体名:[规则uuid列表]
+    private Map<String, Set<Integer>> despiteInsideMap = null; // 实体名:[规则uuid列表]
+    private Map<String, Map<String, Set<Integer>>> insideMap = null;
+
+
    /**
     * Builds the machine: loads every filter rule from the database, then
     * builds all lookup indexes over the loaded rules.
     *
     * Order matters: loadRules() must run first because every make*Map()
     * method iterates this.filterRules, which loadRules() populates.
     */
    public RuleCheckMachine() {
        this.loadRules();
        this.makeKey1Map();
        this.makeKey2Map();
        this.makeInsideMap();
        this.makeDespiteMap();
        this.makeDespiteInsideMap();
    }
+
+
    /**
     * Loads filter rules from the relation_neg_rules table (rows with
     * status = 1) into this.filterRules.
     *
     * A row's `despite` and `despite_inside` columns hold comma-separated
     * lists; one FilterRule is created per (despite, despite_inside) pair,
     * so a single row can expand into several rules. Every rule receives a
     * sequential uuid, later used as the value in the index maps.
     *
     * @throws RuntimeException if reading the result set fails
     */
    public void loadRules() {
        // Connect to the rules database.
        // NOTE(review): URL with credentials is hardcoded in source — should be
        // externalized to configuration / a secrets store.
        String url = "jdbc:mysql://192.168.2.235/test_case?user=root&password=diagbot@20180822";
        MysqlConnector connector = new MysqlConnector(url);
        String querySql =
                "SELECT rr.key_1, rr.type_1, rr.key_2, rr.type_2, rr.inside, rr.inside_type, " +
                        "rr.despite, rr.despite_inside " +
                        "FROM relation_neg_rules AS rr " +
                        "WHERE rr.`status` = 1";

        ResultSet rs = connector.query(querySql);
        Integer uuid = 0;
        try {
            while (rs.next()) {
                String key_1 = rs.getString("key_1");
                String type_1 = rs.getString("type_1");

                String key_2 = rs.getString("key_2");
                String type_2 = rs.getString("type_2");

                String inside = rs.getString("inside");
                String inside_type = rs.getString("inside_type");

                String despite = rs.getString("despite");
                String despite_inside = rs.getString("despite_inside");

                // NOTE(review): assumes despite/despite_inside columns are never
                // NULL (split() would throw NPE) — confirm against the schema.
                String[] despiteSplit = despite.split(",");
                String[] despiteInsideSplit = despite_inside.split(",");
                // Expand the cross product of the two comma-separated lists.
                for (int j = 0; j < despiteSplit.length; j++) {
                    for (int k = 0; k < despiteInsideSplit.length; k++) {
                        Map<String, String> variableMap = new HashMap<>();
                        variableMap.put("key_1", key_1);
                        variableMap.put("type_1", type_1);

                        variableMap.put("key_2", key_2);
                        variableMap.put("type_2", type_2);

                        variableMap.put("inside", inside);
                        variableMap.put("inside_type", inside_type);

                        variableMap.put("despite", despiteSplit[j]);
                        variableMap.put("despite_inside", despiteInsideSplit[k]);

                        FilterRule filterRule = new FilterRule(variableMap);
                        filterRule.setUuid(uuid);
                        this.filterRules.add(filterRule);

//                            System.out.println(filterRule);

                        uuid += 1;
                    }
                }

            }

        } catch (SQLException e) {
            e.printStackTrace();
            throw new RuntimeException("加载规则字典失败");
        } finally {
            // Presumably closes the statement/result set as well — verify MysqlConnector.
            connector.close();
        }
    }
+
+    /**
+     * 制作实体1相关信息字典
+     */
+    private void makeKey1Map() {
+        Map<String, Map<String, Set<Integer>>> key_1_map_ = new HashMap<>();
+        Map<String, Set<Integer>> emptyMap = new HashMap<>();
+        Map<String, Set<Integer>> typeMap = new HashMap<>();
+        Map<String, Set<Integer>> wordMap = new HashMap<>();
+        key_1_map_.put("", emptyMap);
+        key_1_map_.put("type", typeMap);
+        key_1_map_.put("word", wordMap);
+
+        for (FilterRule rule : this.filterRules) {
+            String key_1 = rule.getKey_1();
+            String type_1 = rule.getType_1();
+            Integer uuid = rule.getUuid();
+
+            this.inputMaps(key_1, type_1, uuid, emptyMap, typeMap, wordMap, null);
+        }
+        this.key_1_map = key_1_map_;
+    }
+
+
+    /**
+     * 制作实体2相关信息字典
+     */
+    private void makeKey2Map() {
+        Map<String, Map<String, Set<Integer>>> key_2_map_ = new HashMap<>();
+        Map<String, Set<Integer>> emptyMap = new HashMap<>();
+        Map<String, Set<Integer>> typeMap = new HashMap<>();
+        Map<String, Set<Integer>> wordMap = new HashMap<>();
+        key_2_map_.put("", emptyMap);
+        key_2_map_.put("type", typeMap);
+        key_2_map_.put("word", wordMap);
+
+        for (FilterRule rule : this.filterRules) {
+            String key_2 = rule.getKey_2();
+            String type_2 = rule.getType_2();
+            Integer uuid = rule.getUuid();
+
+            this.inputMaps(key_2, type_2, uuid, emptyMap, typeMap, wordMap, null);
+        }
+        this.key_2_map = key_2_map_;
+    }
+
    /**
     * Builds the lookup index for the "inside" part of every rule, keyed by
     * the inside-type discriminator ("punc", "type", "typePunctuation",
     * "word"); the result is stored in this.insideMap. Punctuation-typed
     * inside values are additionally collected into this.punctuations.
     */
    private void makeInsideMap() {
        Map<String, Map<String, Set<Integer>>> insideMap_ = new HashMap<>();
        Map<String, Set<Integer>> punctuationMap = new HashMap<>();
        Map<String, Set<Integer>> typeMap = new HashMap<>();
        Map<String, Set<Integer>> typePunctuationMap = new HashMap<>();
        Map<String, Set<Integer>> wordMap = new HashMap<>();
        insideMap_.put("punc", punctuationMap);
        insideMap_.put("type", typeMap);
        insideMap_.put("typePunctuation", typePunctuationMap);
        insideMap_.put("word", wordMap);

        for (FilterRule rule : this.filterRules) {
            String inside = rule.getInside();
            String insideType = rule.getInsideType();
            Integer uuid = rule.getUuid();
            // Collect every punctuation-typed inside value for later lookups.
            if (insideType.equals("punc"))
                this.punctuations.put(inside, inside);

            // NOTE(review): "," below is the full-width comma; rules whose
            // inside value STARTS with it are routed to the type+punctuation
            // index. Also assumes inside is never empty (substring(0, 1)
            // would throw) — confirm against the rule table's conventions.
            if (",".equals(inside.substring(0, 1)))
                this.inputMaps(inside, insideType, uuid, null, typePunctuationMap, wordMap, punctuationMap);
            else
                this.inputMaps(inside, insideType, uuid, null, typeMap, wordMap, punctuationMap);
        }
        this.insideMap = insideMap_;
    }
+
+    /**
+     * maps输入
+     *
+     * @param key
+     * @param type
+     * @param uuid
+     * @param emptyMap
+     * @param typeMap
+     * @param wordMap
+     */
+    private void inputMaps(String key, String type, Integer uuid, Map<String, Set<Integer>> emptyMap,
+                           Map<String, Set<Integer>> typeMap, Map<String, Set<Integer>> wordMap,
+                           Map<String, Set<Integer>> punctuationMap) {
+
+        if ("".equals(type)) {
+            if (emptyMap.get(key) == null)
+                emptyMap.put(key, new HashSet<>());
+            emptyMap.get(key).add(uuid);
+        } else if ("type".equals(type)) {
+            if (typeMap.get(key) == null)
+                typeMap.put(key, new HashSet<>());
+            typeMap.get(key).add(uuid);
+        } else if ("word".equals(type)) {
+            if (wordMap.get(key) == null)
+                wordMap.put(key, new HashSet<>());
+            wordMap.get(key).add(uuid);
+        } else if ("punc".equals(type)) {
+            if (punctuationMap.get(key) == null)
+                punctuationMap.put(key, new HashSet<>());
+            punctuationMap.get(key).add(uuid);
+        } else {
+            throw new RuntimeException("出现了位置新type");
+        }
+
+    }
+
+
+    /**
+     * 制作例外字典
+     */
+    private void makeDespiteMap() {
+        Map<String, Set<Integer>> despiteMap = new HashMap<>();
+        for (FilterRule rule : this.filterRules) {
+            String despite = rule.getDespite();
+            if (!despite.equals("")) {  // 空白不收录
+                if (despiteMap.get(despite) == null) {
+                    despiteMap.put(despite, new HashSet<>());
+                }
+                despiteMap.get(despite).add(rule.getUuid());  //
+            }
+        }
+        this.despiteMap = despiteMap;
+    }
+
+
+    /**
+     * 制作例外_内部字典
+     */
+    private void makeDespiteInsideMap() {
+        Map<String, Set<Integer>> despiteInsideMap = new HashMap<>();
+        for (FilterRule rule : this.filterRules) {
+            String despiteInside = rule.getDespiteInside();
+            if (!despiteInside.equals("")) {  // 空白不收录
+                if (despiteInsideMap.get(despiteInside) == null) {
+                    despiteInsideMap.put(despiteInside, new HashSet<>());
+                }
+                despiteInsideMap.get(despiteInside).add(rule.getUuid());  //
+            }
+        }
+        this.despiteInsideMap = despiteInsideMap;
+    }
+
+    /**
+     * 名称—类别—开始位置类
+     */
+    class NameTypeStartPosition implements Comparable<NameTypeStartPosition> {
+        private String name;
+        private String type;
+        private int startPosition;
+
+        public NameTypeStartPosition(String name, String type, int startPosition) {
+            this.name = name;
+            this.type = type;
+            this.startPosition = startPosition;
+        }
+
+        @Override
+        public int compareTo(NameTypeStartPosition o) {
+            return this.startPosition - o.getStartPosition();
+        }
+
+        public String getName() {
+            return name;
+        }
+
+        public void setName(String name) {
+            this.name = name;
+        }
+
+        public String getType() {
+            return type;
+        }
+
+        public void setType(String type) {
+            this.type = type;
+        }
+
+        public int getStartPosition() {
+            return startPosition;
+        }
+
+        public void setStartPosition(int startPosition) {
+            this.startPosition = startPosition;
+        }
+
+        @Override
+        public String toString() {
+            return "NameTypeStartPosition{" +
+                    "name='" + name + '\'' +
+                    ", type='" + type + '\'' +
+                    ", startPosition=" + startPosition +
+                    '}';
+        }
+
+    }
+
+    /**
+     * 获取已排序的(名称,类别,开始位置)对象
+     *
+     * @param triads
+     * @return
+     */
+    public List<NameTypeStartPosition> getSortedNameTypeByPosition(List<Triad> triads) {
+        List<NameTypeStartPosition> nameTypeStartPositions = new ArrayList<>();
+        for (Triad triad : triads) {
+            Lemma l1 = triad.getL_1();
+            Lemma l2 = triad.getL_2();
+            nameTypeStartPositions.add(
+                    new NameTypeStartPosition(l1.getText(), l1.getProperty(), l1.getStartPosition()));
+            nameTypeStartPositions.add(
+                    new NameTypeStartPosition(l2.getText(), l2.getProperty(), l2.getStartPosition()));
+        }
+        nameTypeStartPositions.sort(Comparator.naturalOrder());
+
+        return nameTypeStartPositions;
+    }
+
+    /**
+     * 是否移除
+     *
+     * @param nameTypeStartPositions
+     * @param startIndex
+     * @param endIndex
+     * @return
+     */
+    public boolean isRemove(List<NameTypeStartPosition> nameTypeStartPositions, int startIndex, int endIndex,
+                            String sentence) {
+        Set<Integer> remainUuids = new HashSet<>();  // 剩余规则的uuid
+        for (FilterRule rule : this.filterRules)
+            remainUuids.add(rule.getUuid());
+
+        // 过滤实体名称触发例外条件情况
+        String entity_1_name = nameTypeStartPositions.get(startIndex).getName();
+        String entity_1_type = nameTypeStartPositions.get(startIndex).getType();
+
+        String entity_2_name = nameTypeStartPositions.get(endIndex).getType();
+        String entity_2_type = nameTypeStartPositions.get(endIndex).getType();
+
+        Set<Integer> set = null;
+        set = this.despiteMap.get(entity_1_name);  // 过滤有实体1名为例外情况(即,不成立)的规则(的uuid)
+        this.removeAll(remainUuids, set);
+
+        set = this.despiteMap.get(entity_2_name);  // 过滤有实体2名为例外情况(即,不成立)的规则(的uuid)
+        this.removeAll(remainUuids, set);
+
+        // 过滤中间实体的名称触发例外条件情况
+        for (int i = startIndex; i <= endIndex; i++) {
+            NameTypeStartPosition nameTypeStartPosition = nameTypeStartPositions.get(i);
+            set = this.despiteInsideMap.get(nameTypeStartPosition.getName());
+            this.removeAll(remainUuids, set);
+        }
+
+        // 三板斧过滤
+        // 实体1,过滤
+        set = new HashSet<>();
+        this.addAll(set, this.key_1_map.get("").get(""));
+        // 满足,形如("形容词", "type") 过滤条件的规则
+        this.addAll(set, this.key_1_map.get("type").get(entity_1_type));
+        // 满足,形如("胸痛", "word") 过滤条件的规则
+        this.addAll(set, this.key_1_map.get("word").get(entity_1_name));
+        this.retainAll(remainUuids, set);  // 求交集,同事满足实体1相关的过滤条件,且不不满足例外情况
+        if (remainUuids.size() == 0)
+            return false;
+
+        // 实体2,过滤
+        set = new HashSet<>();
+        this.addAll(set, this.key_2_map.get("").get(""));
+        // 满足,形如("形容词", "type") 过滤条件的规则
+        this.addAll(set, this.key_2_map.get("type").get(entity_2_type));
+        // 满足,形如("胸痛", "word") 过滤条件的规则
+        this.addAll(set, this.key_2_map.get("word").get(entity_2_name));
+        this.retainAll(remainUuids, set);  // 求交集,同事满足实体1相关的过滤条件,且不不满足例外情况
+        if (remainUuids.size() == 0)
+            return false;
+
+        // 中间实体过滤
+        set = new HashSet<>();
+        for (int i = startIndex; i <= endIndex; i++) {
+            NameTypeStartPosition nameTypeStartPosition = nameTypeStartPositions.get(i);
+            // 中间实体满足,形如("胸痛", "word") 过滤条件的规则
+            this.addAll(set, this.insideMap.get("word").get(nameTypeStartPosition.getName()));
+            // 中间实体满足,形如(";", "punc") 过滤条件的规则
+            this.addAll(set, this.insideMap.get("type").get(nameTypeStartPosition.getType()));  // 没有逗号的
+        }
+
+        int entity_1_start = nameTypeStartPositions.get(startIndex).getStartPosition();
+        int entity_2_start = nameTypeStartPositions.get(endIndex).getStartPosition();
+
+        // 标点过滤
+        String aPunc = null;
+        for (int i=entity_1_start; i<entity_2_start;i++){
+            aPunc = sentence.substring(i, i+1);
+            if (this.punctuations.get(aPunc) != null)
+                this.addAll(set, this.insideMap.get("punc").get(aPunc));
+        }
+
+        // 中文和英文逗号+属性 过滤
+        String[] commas = {",", ","};
+        int commaIndex = 0;
+        String commaPadType = null;  // 逗号拼接上类型
+        for (String comma: commas) {
+            commaIndex = sentence.indexOf(comma, entity_1_start + 1);  // 逗号位置
+            while (commaIndex > -1 && commaIndex < entity_2_start) {
+                commaIndex = sentence.indexOf(comma, commaIndex + 1);  // 下一个逗号
+                for (int i = startIndex; i <= endIndex; i++) {  // 每个逗号与后面的所有实体都匹配一次
+                    NameTypeStartPosition nameTypeStartPosition = nameTypeStartPositions.get(i);
+                    if (nameTypeStartPosition.getStartPosition() > commaIndex) {
+                        commaPadType = "," + nameTypeStartPosition.getType();
+                        this.addAll(set, this.insideMap.get("typePunctuation").get(commaPadType));
+                    }
+
+                }
+            }
+
+        }
+
+        this.retainAll(remainUuids, set);  // 求交集,同事中间实体相关的过滤条件,且不不满足例外情况
+
+//        for (FilterRule rule: this.filterRules) {
+//            if (remainUuids.contains(rule.getUuid()))
+//                System.out.println(rule);
+//
+//        }
+
+        return remainUuids.size() > 0;  // 还有规则满足,则过滤
+
+    }
+
+    /**
+     * 求差集,避免null和空集
+     *
+     * @param basicSet
+     * @param set
+     */
+    private void removeAll(Set<Integer> basicSet, Set<Integer> set) {
+        if (set != null && set.size() > 0)
+            basicSet.removeAll(set);
+    }
+
+    /**
+     * 求交集,避免null和空集
+     *
+     * @param basicSet
+     * @param set
+     */
+    private void addAll(Set<Integer> basicSet, Set<Integer> set) {
+        if (set != null && set.size() > 0)
+            basicSet.addAll(set);
+    }
+
+    /**
+     * 求并集,避免null和空集
+     *
+     * @param basicSet
+     * @param set
+     */
+    private void retainAll(Set<Integer> basicSet, Set<Integer> set) {
+        if (set != null && set.size() > 0)
+            basicSet.retainAll(set);
+    }
+
+    /**
+     * 检查并移除
+     *
+     * @param sentence 句子
+     * @param triads 三元组列表
+     */
+    public void checkAndRemove(String sentence, List<Triad> triads) {
+        List<NameTypeStartPosition> nameTypeStartPositions = this.getSortedNameTypeByPosition(triads);
+        Map<Integer, Integer> startPositionToIndexMap = new HashMap<>();
+        for (int i = 0; i < nameTypeStartPositions.size(); i++)
+            startPositionToIndexMap.put(nameTypeStartPositions.get(i).getStartPosition(), i);
+
+        Iterator<Triad> it = triads.iterator();
+        while (it.hasNext()) {  // 遍历三元组,移除满足过滤规则的
+            Triad triad = it.next();
+            int startIndex = startPositionToIndexMap.get(triad.getL_1().getStartPosition());
+            int endIndex = startPositionToIndexMap.get(triad.getL_2().getStartPosition());
+            if (isRemove(nameTypeStartPositions, startIndex, endIndex, sentence)) {
+                it.remove();
+            }
+        }
+    }
+}

+ 3 - 4
algorithm/src/main/java/org/algorithm/core/cnn/AlgorithmCNNExecutor.java

@@ -1,13 +1,12 @@
 package org.algorithm.core.cnn;
 
-import org.algorithm.core.cnn.entity.Lemma;
 import org.algorithm.core.cnn.entity.Triad;
 
 import java.util.List;
 
 /**
  * @ClassName org.algorithm.core.cnn.model.AlgorithmCNNExecutor
- * @Description TODO
+ * @Description
  * @Author fyeman
  * @Date 2019/1/17/017 19:18
  * @Version 1.0
@@ -16,8 +15,8 @@ public abstract class AlgorithmCNNExecutor {
     /**
      *
      * @param content 输入句子
-     * @param triads 实体列表
-     * @return
+     * @param triads 实体列表(三元组列表)
+     * @return  [[有关系的一系列词]]
      */
     public abstract List<Triad> execute(String content, List<Triad> triads);
 }

+ 22 - 0
algorithm/src/main/java/org/algorithm/core/cnn/AlgorithmCNNExecutorPacs.java

@@ -0,0 +1,22 @@
+package org.algorithm.core.cnn;
+
+import org.algorithm.core.cnn.entity.Triad;
+
+import java.util.List;
+
+/**
+ * @ClassName org.algorithm.core.cnn.model.AlgorithmCNNExecutor
+ * @Description
+ * @Author fyeman
+ * @Date 2019/1/17/017 19:18
+ * @Version 1.0
+ **/
+public abstract class AlgorithmCNNExecutorPacs {
+    /**
+     *
+     * @param content 输入句子
+     * @param triads 实体列表(三元组列表)
+     * @return  [[有关系的一系列词]]
+     */
+    public abstract List<List<String>>  execute(String content, List<Triad> triads);
+}

+ 1 - 1
algorithm/src/main/java/org/algorithm/core/cnn/dataset/RelationExtractionDataSet.java

@@ -17,7 +17,7 @@ import com.alibaba.fastjson.JSONObject;
 public class RelationExtractionDataSet {
 
     private Map<String, Integer> char2id = new HashMap<>();
-    public final int MAX_LEN = 512;
+    public final int MAX_LEN = 256;
 
 
     public RelationExtractionDataSet(String dir) {

+ 32 - 1
algorithm/src/main/java/org/algorithm/core/cnn/entity/Lemma.java

@@ -10,12 +10,38 @@ import java.util.List;
  * @Date 2019/1/17/017 19:15
  * @Version 1.0
  **/
-public class Lemma {
+public class Lemma implements Comparable<Lemma> {
     private String text;
     private String position;
     private int len;
     private String property;
 
+    private Lemma parent;
+
+    private boolean haveChildren = false;
+
+    public boolean isHaveChildren() {
+        return haveChildren;
+    }
+
+    public void setHaveChildren(boolean haveChildren) {
+        this.haveChildren = haveChildren;
+    }
+
+    public Lemma getParent() {
+        return parent;
+    }
+
+    public void setParent(Lemma parent) {
+        this.parent = parent;
+    }
+
+    public int getStartPosition() {
+        String[] pos = this.position.split(",");
+        return Integer.parseInt(pos[0]);
+    }
+
+
     private List<Lemma> relationLemmas = new ArrayList<>();
 
     public String getText() {
@@ -64,4 +90,9 @@ public class Lemma {
         }
         relationLemmas.add(l);
     }
+
+    @Override
+    public int compareTo(Lemma o) {
+        return this.getStartPosition() - o.getStartPosition();
+    }
 }

+ 40 - 13
algorithm/src/main/java/org/algorithm/core/cnn/model/RelationExtractionEnsembleModel.java

@@ -1,6 +1,8 @@
 package org.algorithm.core.cnn.model;
 
-import org.algorithm.core.cnn.AlgorithmCNNExecutor;
+import org.algorithm.core.RelationTreeUtils;
+import org.algorithm.core.RuleCheckMachine;
+import org.algorithm.core.cnn.AlgorithmCNNExecutorPacs;
 import org.algorithm.core.cnn.dataset.RelationExtractionDataSet;
 import org.algorithm.core.cnn.entity.Triad;
 import org.diagbot.pub.utils.PropertiesUtil;
@@ -21,7 +23,7 @@ import java.util.concurrent.*;
  * @Date: 2019/1/22 10:21
  * @Description: 集成模型
  */
-public class RelationExtractionEnsembleModel extends AlgorithmCNNExecutor {
+public class RelationExtractionEnsembleModel extends AlgorithmCNNExecutorPacs {
     private final String X_PLACEHOLDER = "X";
     private final String PREDICTION = "prediction/prediction";
     private final int NUM_LABEL = 1;
@@ -30,8 +32,10 @@ public class RelationExtractionEnsembleModel extends AlgorithmCNNExecutor {
     private RelationExtractionDataSet dataSet;
     private RelationExtractionSubModel[] subModels = new RelationExtractionSubModel[2];
     private ExecutorService executorService = Executors.newCachedThreadPool();
+    private final RuleCheckMachine ruleCheckMachine = new RuleCheckMachine();
 
     public RelationExtractionEnsembleModel() {
+        // 解析路径
         PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
 
         String modelsPath = prop.getProperty("basicPath");  // 模型基本路径
@@ -39,18 +43,20 @@ public class RelationExtractionEnsembleModel extends AlgorithmCNNExecutor {
         dataSetPath = dataSetPath + File.separator + "char2id.json";
         String exportDir = modelsPath.replace("model_version_replacement", "ensemble_model_2");
 
+        // 加载数据集和初始化集成模型
         this.dataSet = new RelationExtractionDataSet(dataSetPath);
         this.init(exportDir);
 
+        // 添加子模型系数,并加载子模型cnn_1d_low
         Map<String, Tensor<Float>> cnn_1d_low_map = new HashMap<>();
-        cnn_1d_low_map.put("keep_prob",Tensor.create(1.0f, Float.class));
+        cnn_1d_low_map.put("keep_prob", Tensor.create(1.0f, Float.class));
         subModels[0] = new RelationExtractionSubModel("cnn_1d_low", cnn_1d_low_map);
-//        subModels[1] = new RelationExtractionSubModel("cnn_1d_lstm_low");
 
+        // 添加子模型系数,并加载子模型lstm_low_api
         Map<String, Tensor<Float>> lstm_low_api_map = new HashMap<>();
-        lstm_low_api_map.put("input_keep_prob",Tensor.create(1.0f, Float.class));
-        lstm_low_api_map.put("output_keep_prob",Tensor.create(1.0f, Float.class));
-        lstm_low_api_map.put("state_keep_prob",Tensor.create(1.0f, Float.class));
+        lstm_low_api_map.put("input_keep_prob", Tensor.create(1.0f, Float.class));
+        lstm_low_api_map.put("output_keep_prob", Tensor.create(1.0f, Float.class));
+        lstm_low_api_map.put("state_keep_prob", Tensor.create(1.0f, Float.class));
         subModels[1] = new RelationExtractionSubModel("lstm_low_api", lstm_low_api_map);
     }
 
@@ -92,12 +98,24 @@ public class RelationExtractionEnsembleModel extends AlgorithmCNNExecutor {
         return inputValues;
     }
 
+
+    /**
+     * 数据预处理,包括过滤,等操作
+     * @param content
+     * @param triads
+     */
+    private void preProcess(String content, List<Triad> triads){
+        if (!(content.length() > this.dataSet.MAX_LEN) && triads.size() > 0) // 句子长度不超过MAX_LEN,有三元组
+            this.ruleCheckMachine.checkAndRemove(content, triads);
+    }
+
     @Override
-    public List<Triad> execute(String content, List<Triad> triads) {
-        // 句子长度不超过MAX_LEN,有三元组
-        if (content.length() > this.dataSet.MAX_LEN || triads.size() < 1) {
-            return new ArrayList<>();
-        }
+    public List<List<String>> execute(String content, List<Triad> triads) {
+        // 预处理
+        this.preProcess(content, triads);
+        if (content.length() > this.dataSet.MAX_LEN || triads.size() < 1)  // 句子长度不超过MAX_LEN,有三元组
+            return null;
+
         int[][] inputValues = this.convertData(content, triads);  // shape = [3, batchSize * this.subModels.length]
         int batchSize = triads.size();
 
@@ -159,7 +177,16 @@ public class RelationExtractionEnsembleModel extends AlgorithmCNNExecutor {
         for (Triad triad : deleteTriads)
             triads.remove(triad);
 
-        return triads;
+        return this.triadsToRelationTreeBranches(triads);
+    }
+
+    /**
+     * 从三元组列表到关系树分枝
+     * @param triads
+     * @return
+     */
+    public List<List<String>> triadsToRelationTreeBranches(List<Triad> triads) {
+        return RelationTreeUtils.triadsToRelationTreeBranches(triads);
     }
 
 

+ 4 - 4
algorithm/src/main/java/org/algorithm/core/cnn/model/RelationExtractionModel.java

@@ -4,7 +4,7 @@ import com.alibaba.fastjson.JSON;
 import com.alibaba.fastjson.JSONArray;
 import com.alibaba.fastjson.JSONObject;
 import com.alibaba.fastjson.TypeReference;
-import org.algorithm.core.cnn.AlgorithmCNNExecutor;
+import org.algorithm.core.cnn.AlgorithmCNNExecutorPacs;
 import org.algorithm.core.cnn.dataset.RelationExtractionDataSet;
 import org.algorithm.core.cnn.entity.LemmaInfo;
 import org.algorithm.core.cnn.entity.Triad;
@@ -21,7 +21,7 @@ import java.util.List;
  * @Date: 2019/1/22 10:21
  * @Decription:
  */
-public class RelationExtractionModel extends AlgorithmCNNExecutor {
+public class RelationExtractionModel extends AlgorithmCNNExecutorPacs {
 //    self.X = tf.placeholder(tf.int32, shape=[None, self.max_length], name='X')
 //    self.pos1 = tf.placeholder(tf.int32, shape=[None, self.max_length], name='pos1')
 //    self.pos2 = tf.placeholder(tf.int32, shape=[None, self.max_length], name='pos2')
@@ -54,7 +54,7 @@ public class RelationExtractionModel extends AlgorithmCNNExecutor {
     }
 
     @Override
-    public List<Triad> execute(String content, List<Triad> triads) {
+    public List<List<String>> execute(String content, List<Triad> triads) {
 //        List<Lemma[]> combinations = new ArrayList<>();
 //        // 组合
 //        for(int i=0; i < lemmas.size() - 1; i++){  // 两两组合成实体对
@@ -83,7 +83,7 @@ public class RelationExtractionModel extends AlgorithmCNNExecutor {
 //            }
 //
 //        }
-        return triads;
+        return null;
     }
 
     /**

+ 2 - 1
algorithm/src/main/java/org/algorithm/core/neural/DiagnosisPredictExecutor.java

@@ -15,7 +15,8 @@ public class DiagnosisPredictExecutor extends AlgorithmNeuralExecutor {
     public DiagnosisPredictExecutor() {
         String modelVersion = "diagnosisPredict.version";
 
-        this.model = TensorFlowModelLoadFactory.create(modelVersion);
+//        this.model = TensorFlowModelLoadFactory.create(modelVersion);
+        this.model = TensorFlowModelLoadFactory.createAndFilterDiagnosis(modelVersion);  // 加了疾病过滤
     }
 
 }

+ 30 - 7
algorithm/src/main/java/org/algorithm/core/neural/TensorFlowModelLoadFactory.java

@@ -11,13 +11,10 @@ import org.diagbot.pub.utils.PropertiesUtil;
  * @Description:
  */
 public class TensorFlowModelLoadFactory {
-    
+
     /**
      * 加载并创建模型类
-     * @param exportDir  模型保存地址
-     * @param inputOpName  输入op的名称
-     * @param outputOpName  输出op的名称
-     * @param dataSet     模型使用的数据集
+     * @param modelVersion  模型版本号
      * @return 模型
      */
     public static TensorflowModel create(String modelVersion) {
@@ -28,10 +25,9 @@ public class TensorFlowModelLoadFactory {
         String inputOpName = "X";  // 统一输入op名称
         String outputOpName = "softmax/softmax";  // 统一输出op名称
         
-        // TODO:修改的地方
 //        NNDataSet dataSet = new NNDataSetImplNonParallel(modelVersion);  // 新模型
         NNDataSet dataSet = new NNDataSetImpl(modelVersion);  // 老模型
-        
+
         String modelPath =prop.getProperty("basicPath");  // 模型基本路径
         modelVersion = prop.getProperty(modelVersion);
         modelPath = modelPath.replace("model_version_replacement", modelVersion);  // 生成模型路径
@@ -41,4 +37,31 @@ public class TensorFlowModelLoadFactory {
         return tm;
     }
 
+    /**
+     * 加载并创建模型类
+     * @param modelVersion  模型版本号
+     * @return 模型
+     */
+    public static TensorflowModel createAndFilterDiagnosis(String modelVersion) {
+
+
+        PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
+
+        String inputOpName = "X";  // 统一输入op名称
+        String outputOpName = "softmax/softmax";  // 统一输出op名称
+
+        NNDataSet dataSet = new NNDataSetImpl(modelVersion);  // 老模型
+
+        dataSet.setDoFilterDiagnosis(true);
+        dataSet.readFilterDiagnosisDict();
+
+        String modelPath =prop.getProperty("basicPath");  // 模型基本路径
+        modelVersion = prop.getProperty(modelVersion);
+        modelPath = modelPath.replace("model_version_replacement", modelVersion);  // 生成模型路径
+
+        TensorflowModel tm = new TensorflowModel(modelPath, inputOpName, outputOpName,
+                dataSet);
+        return tm;
+    }
+
 }

+ 122 - 13
algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSet.java

@@ -1,10 +1,10 @@
 package org.algorithm.core.neural.dataset;
 
-import java.util.HashMap;
-import java.util.Map;
+import java.util.*;
 
 /**
  * 神经网络用数据处理模块
+ *
  * @Author: bijl
  * @Date: 2018年7月20日-下午4:01:34
  * @Description:
@@ -13,17 +13,20 @@ public abstract class NNDataSet {
     protected final int NUM_FEATURE;
     private final int NUM_LABEL;
     protected final Map<String, Integer> FEATURE_DICT = new HashMap<>();
-    
+
     // 新版本新加的三种关键词
     protected final Map<String, Integer> PARTBODY_DICT = new HashMap<>();
     protected final Map<String, Integer> PROPERTY_DICT = new HashMap<>();
     protected final Map<String, Integer> DURATION_DICT = new HashMap<>();
-    
+
     protected final Map<String, Integer> LABEL_DICT = new HashMap<>();
     protected final Map<String, Integer> NEGATIVE_DICT = new HashMap<>();
+    protected final Map<String, String> RE_SPLIT_WORD_DICT = new HashMap<>();
+    protected final Map<String, Map<String, Integer>> RELATED_DIAGNOSIS_DICT = new HashMap<>();
+    protected final List<String> FEATURE_NAME_STORE = new ArrayList<>();
     private final String[] FEATURE_DICT_ARRAY;
     private final String[] LABEL_DICT_ARRAY;
-
+    private boolean doFilterDiagnosis = false;
 
     public NNDataSet(String modelAndVersion) {
         this.readDict(modelAndVersion);
@@ -32,10 +35,12 @@ public abstract class NNDataSet {
         this.FEATURE_DICT_ARRAY = new String[this.NUM_FEATURE];
         this.LABEL_DICT_ARRAY = new String[this.NUM_LABEL];
         this.makeDictArr();
+        this.readReSplitWordDict();
     }
-    
+
     /**
      * 装外部输入转为特征向量
+     *
      * @param inputs
      * @return
      */
@@ -45,28 +50,118 @@ public abstract class NNDataSet {
      * 读取特征和类别字典
      */
     public abstract void readDict(String modelAndVersion);
-    
+
+    /**
+     * 读取再分词字典
+     */
+    public abstract void readReSplitWordDict();
+
+    /**
+     * 读取过滤字典
+     */
+    public abstract void readFilterDiagnosisDict();
+
     /**
      * 生成字典列表
      */
     private void makeDictArr() {
-        for (Map.Entry<String, Integer> entry : this.FEATURE_DICT.entrySet()) 
+        for (Map.Entry<String, Integer> entry : this.FEATURE_DICT.entrySet())
             this.FEATURE_DICT_ARRAY[entry.getValue()] = entry.getKey();
-        
-        for (Map.Entry<String, Integer> entry : this.LABEL_DICT.entrySet()) 
+
+        for (Map.Entry<String, Integer> entry : this.LABEL_DICT.entrySet())
             this.LABEL_DICT_ARRAY[entry.getValue()] = entry.getKey();
-        
+
+    }
+
+    /**
+     * 打包特征名和概率 + 过滤疾病
+     * 基本操作,过滤前20个疾病,如果
+     *
+     * @param predict 模型输出
+     * @return
+     */
+    public Map<String, Float> wrapAndFilter(float[][] predict) {
+        List<NameAndValue> nameAndValueList = new ArrayList<>();
+        for (int i = 0; i < predict[0].length; i++)
+            nameAndValueList.add(new NameAndValue(this.LABEL_DICT_ARRAY[i], predict[0][i]));
+        nameAndValueList.sort(Comparator.reverseOrder());  // 按概率从大到小排列
+
+        Map<String, Float> result = new HashMap<>();
+        Integer cnt = 0;
+        String diagnosis;
+        NameAndValue nameAndValue;
+        Map<String, Integer> relatedDiagnoses = null;
+        for (int i = 0; i < nameAndValueList.size(); i++) {
+            nameAndValue = nameAndValueList.get(i);
+            diagnosis = nameAndValue.getName();
+            for (String featureName : this.FEATURE_NAME_STORE) {
+                relatedDiagnoses = this.RELATED_DIAGNOSIS_DICT.get(featureName);
+                if (relatedDiagnoses != null && relatedDiagnoses.get(diagnosis) != null) {
+                    result.put(nameAndValue.getName(), nameAndValue.getValue());
+                    cnt += 1;
+                }
+            }
+            if ((i >= 20 || i >= 50) && cnt > 0)  // 如果前20或50个推送中有相关的疾病,只过滤他们
+                break;
+        }
+        return result;
+    }
+
+    /**
+     * 用于排序的类
+     */
+    class NameAndValue implements Comparable<NameAndValue> {
+
+        private String name;
+        private Float value;
+
+        NameAndValue(String name, Float value) {
+            this.name = name;
+            this.value = value;
+        }
+
+        @Override
+        public int compareTo(NameAndValue o) {
+            if (this.value > o.getValue())
+                return 1;
+            else if (this.value.equals(o.getValue()))
+                return 0;
+            else
+                return -1;
+        }
+
+        public Float getValue() {
+            return value;
+        }
+
+        public String getName() {
+            return name;
+        }
     }
 
     /**
      * 打包模型输出结果给调用者
-     * 
+     *
      * @param predict 模型输出
      * @return
      */
     public Map<String, Float> wrap(float[][] predict) {
+        if (this.doFilterDiagnosis)  // 过滤疾病
+            return this.wrapAndFilter(predict);
+        else
+            return this.basicWrap(predict);
+    }
+
+
+    /**
+     * 打包模型输出结果给调用者
+     *
+     * @param predict 模型输出
+     * @return
+     */
+    public Map<String, Float> basicWrap(float[][] predict) {
         Map<String, Float> result = new HashMap<>();
-        for (int i=0; i<predict[0].length; i++) {  // 只返回一维向量
+        for (int i = 0; i < predict[0].length; i++) {  // 只返回一维向量
             result.put(this.LABEL_DICT_ARRAY[i], predict[0][i]);
         }
         return result;
@@ -79,6 +174,15 @@ public abstract class NNDataSet {
         return this.NUM_FEATURE;
     }
 
+    /**
+     *  存储特征名称
+     * @param features
+     */
+    public void storeFeatureNames(Map<String, Map<String, String>> features){
+        this.FEATURE_NAME_STORE.clear();
+        this.FEATURE_NAME_STORE.addAll(features.keySet());
+    }
+
     /**
      * @return
      */
@@ -86,4 +190,9 @@ public abstract class NNDataSet {
         return this.NUM_LABEL;
     }
 
+
+    public void setDoFilterDiagnosis(boolean doFilterDiagnosis) {
+        this.doFilterDiagnosis = doFilterDiagnosis;
+    }
+
 }

+ 113 - 73
algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSetImpl.java

@@ -3,6 +3,7 @@ package org.algorithm.core.neural.dataset;
 import org.algorithm.util.TextFileReader;
 import org.diagbot.pub.utils.PropertiesUtil;
 
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -10,7 +11,7 @@ import java.util.Map.Entry;
 
 /**
  * 门诊诊断推送用数据集
- * 
+ *
  * @Author: bijl
  * @Date: 2018年7月26日-上午10:19:43
  * @Description:
@@ -22,9 +23,13 @@ public class NNDataSetImpl extends NNDataSet {
         super(modelAndVersion);
     }
 
-
     @Override
     public float[] toFeatureVector(Map<String, Map<String, String>> inputs) {
+
+        // 新添加的操作
+        this.reSplitWord(inputs);  // 再分词
+        this.storeFeatureNames(inputs);  // 保存特征名称
+
         float[] featureVector = new float[this.NUM_FEATURE];
 
         Iterator<Entry<String, Map<String, String>>> entries = inputs.entrySet().iterator();
@@ -32,13 +37,9 @@ public class NNDataSetImpl extends NNDataSet {
         String featureName = "";
         Integer position = -1;
         Integer negative = 0;
-        // Integer partbodyValue = 0;
         float positive_value = 1.0f;
         float negative_value = -1.0f;
         Map<String, String> featureValues = null;
-        // String partbody = null;
-        // String[] partbodys = null;
-        // String sn = null;
 
         /**
          * 数据方案设计
@@ -51,11 +52,6 @@ public class NNDataSetImpl extends NNDataSet {
             featureValues = entry.getValue();
             position = this.FEATURE_DICT.get(featureName);
             negative = NEGATIVE_DICT.get(featureValues.get("negative"));
-            // 突出主症状的数据方案
-            // sn = featureValues.get("sn");
-            // if("0".equals(sn)) {
-            // negative = negative * 10;
-            // }
 
             if (position != null)
                 if (negative == 1)
@@ -65,91 +61,36 @@ public class NNDataSetImpl extends NNDataSet {
                 else
                     System.out.println("New Nagetive! This may lead to an error.");
 
-
-
-            /**
-             * 部位附属症状数据表示方案 partbodyValue = this.PARTBODY_DICT.get(featureValues.get("partbody"));
-             * if(partbodyValue != null) { value = 1.0f * partbodyValue /
-             * this.PARTBODY_DICT.get("NULL"); // 部位值表示 value = (float)(Math.round(value *
-             * 100000))/100000; // 保留5位有效数字 } value = negative * value; featureVector[position] =
-             * value;
-             * 
-             */
-
         }
 
         return featureVector;
     }
 
-
-    /**
-     * 读取字典
-     */
-//     @Override
-//     public void readDict(String modelAndVersion) {
-//    
-//     PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
-//     String model_version = prop.getProperty(modelAndVersion);
-//     model_version = model_version.trim();
-//    
-//     String url = "jdbc:mysql://192.168.2.235/diagbot-app?user=root&password=diagbot@20180822";
-//     MysqlConnector connector = new MysqlConnector(url);
-//     String querySql = "SELECT md._name, md._index, md.type_id " + "FROM model_dictionary AS md "
-//     + "WHERE md.belong_model = 'outpatient_model'";
-//    
-//     querySql = querySql.replace("outpatient_model", model_version);
-//     ResultSet rs = connector.query(querySql);
-//     try {
-//     while (rs.next()) {
-//     int type_id = rs.getInt("type_id");
-//     int _index = rs.getInt("_index");
-//     String _name = rs.getString("_name");
-//    
-//     if (type_id == 1)
-//     this.FEATURE_DICT.put(_name, _index);
-//     else if (type_id == 2)
-//     this.LABEL_DICT.put(_name, _index);
-//     else if (type_id == 8)
-//     this.NEGATIVE_DICT.put(_name, _index);
-//    
-//     }
-//    
-//     System.out.println("feature size:"+this.FEATURE_DICT.size());
-//    
-//     } catch (SQLException e) {
-//     e.printStackTrace();
-//     throw new RuntimeException("加载特征和类别字典失败");
-//     } finally {
-//     connector.close();
-//     }
-//    
-//     }
-
     @Override
     public void readDict(String modelAndVersion) {
-        
+
         PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
         String model_version = prop.getProperty(modelAndVersion);
 
         String filePath = prop.getProperty("basicPath");  // 基本目录
         filePath = filePath.substring(0, filePath.indexOf("model_version_replacement"));
-        
+
         filePath = filePath + "dictionaries.bin";  // 字典文件位置
-        
+
         List<String> lines = TextFileReader.readLines(filePath);
 
         boolean firstLine = true;
-        
+
         String[] temp = null;
         for (String line : lines) {
             if (firstLine) {  // 去除第一行
                 firstLine = false;
                 continue;
             }
-            
+
             temp = line.split("\\|");
-            
-            if(temp[3].equals(model_version)){
+
+            if (temp[3].equals(model_version)) {
                 int type_id = Integer.parseInt(temp[2]);
                 int _index = Integer.parseInt(temp[1]);
                 String _name = temp[0];
@@ -168,4 +109,103 @@ public class NNDataSetImpl extends NNDataSet {
 
     }
 
+    /**
+     * 再分词:
+     * 基本操作:
+     * 如果再分词表中有某一词项,则移除它,并添加该此项对应的细分词项
+     *
+     * @param inputs 输入
+     */
+    public void reSplitWord(Map<String, Map<String, String>> inputs) {
+        Iterator<Entry<String, Map<String, String>>> entries = inputs.entrySet().iterator();
+
+        String featureName = "";
+        String[] splitWords = null;
+        Map<String, String> featureValues = null;
+        Entry<String, Map<String, String>> entry;
+
+        Map<String, Map<String, String>> tempHashMap = new HashMap<>();  // 用于暂存key, value
+
+        while (entries.hasNext()) {
+            entry = entries.next();
+            featureName = entry.getKey();
+            if (this.FEATURE_DICT.get(featureName) == null  // 特征字典中没有然后再分词
+                    && this.RE_SPLIT_WORD_DICT.get(featureName) != null) {
+                entries.remove();  // 移除该词项
+                splitWords = this.RE_SPLIT_WORD_DICT.get(featureName).split(",");
+                for (String word : splitWords) {  // 添加细分词项
+                    featureValues = new HashMap<>();
+                    featureValues.put("negative", "有"); // 设置为阳性词
+                    tempHashMap.put(word, featureValues);
+                }
+
+            }
+        }
+
+        inputs.putAll(tempHashMap);
+    }
+
+    @Override
+    public void readReSplitWordDict() {
+        PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
+        String filePath = prop.getProperty("basicPath");  // 基本目录
+        filePath = filePath.substring(0, filePath.indexOf("model_version_replacement"));
+
+        filePath = filePath + "re_split_word.bin";  // 字典文件位置
+
+        List<String> lines = TextFileReader.readLines(filePath);
+
+        boolean firstLine = true;
+
+        String[] temp = null;
+        Map<String, String> feature_map = null;
+        for (String line : lines) {
+            if (firstLine) {  // 去除第一行
+                firstLine = false;
+                continue;
+            }
+
+            temp = line.split("\\|");
+
+            this.RE_SPLIT_WORD_DICT.put(temp[0], temp[1]);
+
+        }
+
+        System.out.println("再分词,词条数:" + this.RE_SPLIT_WORD_DICT.size());
+
+    }
+
+    @Override
+    public void readFilterDiagnosisDict() {
+        PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
+        String filePath = prop.getProperty("basicPath");  // 基本目录
+        filePath = filePath.substring(0, filePath.indexOf("model_version_replacement"));
+
+        filePath = filePath + "filter_diagnoses.bin";  // 字典文件位置
+
+        List<String> lines = TextFileReader.readLines(filePath);
+
+        boolean firstLine = true;
+
+        String[] temp = null;
+        String[] diagnoses = null;
+        Map<String, Integer> diagnosis_map = null;
+        for (String line : lines) {
+            if (firstLine) {  // 去除第一行
+                firstLine = false;
+                continue;
+            }
+
+            temp = line.split("\\|");
+            diagnoses = temp[1].split("_");
+            diagnosis_map = new HashMap<>();
+            for (String diagnosis: diagnoses)
+                diagnosis_map.put(diagnosis, 1);
+            this.RELATED_DIAGNOSIS_DICT.put(temp[0], diagnosis_map);
+        }
+
+        System.out.println("疾病过滤字典大小:" + this.RELATED_DIAGNOSIS_DICT.size());
+    }
+
+
 }

+ 11 - 1
algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSetImplNonParallel.java

@@ -22,7 +22,17 @@ public class NNDataSetImplNonParallel extends NNDataSet {
         super(modelAndVersion);
     }
 
-    
+
+    @Override
+    public void readReSplitWordDict() {
+
+    }
+
+    @Override
+    public void readFilterDiagnosisDict() {
+
+    }
+
     @Override
     public float[] toFeatureVector(Map<String, Map<String, String>> inputs) {
         // inputs {症状名:{partbody:部位名, property:属性名, duration:时间类别, sex:性别值, age:年龄值}

+ 33 - 0
algorithm/src/main/java/org/algorithm/factory/RelationExtractionFactory.java

@@ -0,0 +1,33 @@
+package org.algorithm.factory;
+
+import org.algorithm.core.cnn.AlgorithmCNNExecutorPacs;
+import org.algorithm.core.cnn.model.RelationExtractionEnsembleModel;
+
+/**
+ * @Description:
+ * @Author: HUJING
+ * @Date: 2019/9/10 15:25
+ */
+public class RelationExtractionFactory {
+    private static RelationExtractionEnsembleModel relationExtractionEnsembleModelInstance = null;
+
+    public static AlgorithmCNNExecutorPacs getInstance() {
+        try {
+            relationExtractionEnsembleModelInstance = (RelationExtractionEnsembleModel) create(relationExtractionEnsembleModelInstance, RelationExtractionEnsembleModel.class);
+        } catch (InstantiationException inst) {
+            inst.printStackTrace();
+        } catch (IllegalAccessException ille) {
+            ille.printStackTrace();
+        }
+        return relationExtractionEnsembleModelInstance;
+    }
+
+    private static Object create(Object obj, Class cls) throws InstantiationException, IllegalAccessException {
+        if (obj == null) {
+            synchronized (cls) {
+                obj = cls.newInstance();
+            }
+        }
+        return obj;
+    }
+}

+ 5 - 3
algorithm/src/main/java/org/algorithm/test/ReEnsembleModelTest.java

@@ -18,7 +18,7 @@ public class ReEnsembleModelTest {
 
     public static void main(String[] args) {
         RelationExtractionEnsembleModel ensembleModel = new RelationExtractionEnsembleModel();
-
+        List<List<String>> result = new ArrayList<>();
         List<Triad> triads = new ArrayList<>();
         Triad triad_1 = new Triad();
         Lemma l_1 = new Lemma();
@@ -36,9 +36,11 @@ public class ReEnsembleModelTest {
 
         long start = System.nanoTime();
         for (int i=0; i<200; i++)  // 重复100次
-            triads = ensembleModel.execute("患者剧烈胸痛头痛失眠不安", triads);
+        {
+            result = ensembleModel.execute("患者剧烈胸痛头痛失眠不安", triads);
+        }
         long elapsedTime = System.nanoTime() - start;
-        System.out.println(triads.size());
+        System.out.println(result.size());
         System.out.println(elapsedTime);
     }
 }

+ 9 - 4
algorithm/src/main/java/org/algorithm/test/TensorflowExcutorTest.java

@@ -1,5 +1,6 @@
 package org.algorithm.test;
 
+import org.algorithm.core.neural.DiagnosisPredictExecutor;
 import org.algorithm.core.neural.SymptomPredictExecutor;
 import org.algorithm.util.Utils;
 
@@ -13,9 +14,9 @@ public class TensorflowExcutorTest {
         
         //TODO:change VitalPredictExcutor to test different executors
 //        VitalPredictExecutor excutor = new VitalPredictExecutor();
-        SymptomPredictExecutor excutor = new SymptomPredictExecutor();
+//        SymptomPredictExecutor excutor = new SymptomPredictExecutor();
 //        LisPredictExecutor excutor = new LisPredictExecutor();
-//        DiagnosisPredictExecutor excutor = new DiagnosisPredictExecutor();
+        DiagnosisPredictExecutor excutor = new DiagnosisPredictExecutor();
 //        PacsPredictExecutor excutor = new PacsPredictExecutor();
 //        DiagnosisToLisExecutor excutor = new DiagnosisToLisExecutor();
 //        DiagnosisToPacsExecutor excutor = new DiagnosisToPacsExecutor();
@@ -75,7 +76,11 @@ public class TensorflowExcutorTest {
         featureValues.put("age", "34");
         featureValues.put("negative", "有");
         featureValues.put("sn", "0");
-        aMap.put("踝关节疼痛", featureValues);
+
+        aMap.put("上臂远端疼痛", featureValues);
+        aMap.put("上肢远端青紫", featureValues);
+        aMap.put("肘部肿胀", featureValues);
+        aMap.put("外伤", featureValues);
 //        aMap.put("心悸", featureValues);
 //        aMap.put("气急", featureValues);
 //        aMap.put("头痛", featureValues);
@@ -87,7 +92,7 @@ public class TensorflowExcutorTest {
 //        for (Entry<String, Float> entry : result.entrySet()) {
 //            System.out.println(entry.getKey() + " : " + entry.getValue());
 //        }
-//        System.out.println(result);
+        System.out.println(result);
         Utils.top_k(10, result);
 
     }

+ 47 - 40
algorithm/src/main/java/org/algorithm/test/Test.java

@@ -1,49 +1,56 @@
 package org.algorithm.test;
 
+import java.util.*;
 
 public class Test {
-    
+
+
     public static void main(String[] args) {
-        
-//        Integer aa = new Integer(53);
-//        Integer bb = new Integer(954);
-//        float xx = 1.0f;
-//        for(int i=1; i< 955; i++) {
-//            xx = (float)(Math.round(1.0f * i / bb*100000))/100000;
-//            System.out.println(i+":"+xx);
-////        }
-//        String filePath = "/opt/models/model_version_replacement/model";
-//        int index = filePath.indexOf("model_version_replacement");
-//
-//        System.out.println(filePath.substring(0, index));
-//            public static void testJSONStrToJavaBeanObj(){
-//
-//        Student student = JSON.parseObject(JSON_OBJ_STR, new TypeReference<Student>() {});
-//        //Student student1 = JSONObject.parseObject(JSON_OBJ_STR, new TypeReference<Student>() {});//因为JSONObject继承了JSON,所以这样也是可以的
-//
-//        System.out.println(student.getStudentName()+":"+student.getStudentAge());
-//
-        String JSON_ARRAY_STR = "[{\"length\":4,\"offset\":0,\"property\":\"1\",\"text\":\"剑突下痛\",\"threshold\":0.0},{\"length\":2,\"offset\":4,\"property\":\"1\",\"text\":\"胀痛\",\"threshold\":0.0},{\"length\":2,\"offset\":6,\"property\":\"2\",\"text\":\"1天\",\"threshold\":0.0},{\"length\":1,\"offset\":8,\"text\":\",\",\"threshold\":0.0}]\n";
-//        JSONArray jsonArray = JSONArray.parseArray(JSON_ARRAY_STR);
-////        String jsonString = "{\"length\":4,\"offset\":0,\"property\":\"1\",\"text\":\"剑突下痛\",\"threshold\":0.0}";
-//
-//       for (int i = 0; i < jsonArray.size(); i++){
-//           JSONObject job = jsonArray.getJSONObject(i);
-//           LemmaInfo info = JSON.parseObject(job.toJSONString(), new TypeReference<LemmaInfo>() {});
-//           //Student student1 = JSONObject.parseObject(JSON_OBJ_STR, new TypeReference<Student>() {});//因为JSONObject继承了JSON,所以这样也是可以的
-//
-//           System.out.println(info.getLength()+":"+info.getText());
-//       }
-
-        int index = 0;
-        for (int i=0; i<5; i++)
-            for (int j = i+1; j< 6; j++){
-                System.out.println(i + "," + j);
-                index ++;
-            }
-
-        System.out.println(index);
+        List<Integer> data = new ArrayList<>();
+        data.add(1);
+        data.add(3);
+        data.add(5);
+        data.add(7);
+        Test t = new Test();
+
+        List<List<Integer>> workSpace = new ArrayList<>();
+        for (int i = 1; i < data.size(); i++) {
+            t.combinerSelect(data, new ArrayList<>(), workSpace, data.size(), i);
+        }
+
+        System.out.println(workSpace);
 
     }
 
+    /**
+     * 组合生成器
+     *
+     * @param data      原始数据
+     * @param workSpace 自定义一个临时空间,用来存储每次符合条件的值
+     * @param k         C(n,k)中的k
+     */
+    public <E> void combinerSelect(List<E> data, List<E> workSpace, List<List<E>> result, int n, int k) {
+        List<E> copyData;
+        List<E> copyWorkSpace = null;
+
+        if (workSpace.size() == k) {
+            for (E c : workSpace)
+                System.out.print(c);
+
+            result.add(new ArrayList<>(workSpace));
+            System.out.println();
+        }
+
+        for (int i = 0; i < data.size(); i++) {
+            copyData = new ArrayList<E>(data);
+            copyWorkSpace = new ArrayList<E>(workSpace);
+
+            copyWorkSpace.add(copyData.get(i));
+            for (int j = i; j >= 0; j--)
+                copyData.remove(j);
+            combinerSelect(copyData, copyWorkSpace, result, n, k);
+        }
+    }
+
 }
+

+ 46 - 0
algorithm/src/main/java/org/algorithm/test/TestDiagnosisFilter.java

@@ -0,0 +1,46 @@
+package org.algorithm.test;
+
+import org.algorithm.core.neural.dataset.NNDataSetImpl;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * @Author: bijl
+ * @Date: 2019/9/23 10:49
+ * @Description:
+ */
+public class TestDiagnosisFilter {
+
+    public static void main(String[] args) {
+        NNDataSetImpl dataSet = new NNDataSetImpl("diagnosisPredict.version");
+
+        dataSet.readFilterDiagnosisDict();  // 读取过滤表
+//        鼻炎|0|2|outpatient_556_IOE_1
+//        肺癌|1|2|outpatient_556_IOE_1
+//        胃肠炎|2|2|outpatient_556_IOE_1
+//        屈光不正|3|2|outpatient_556_IOE_1
+        // 构造方式:去查dictionaries.bin文件中outpatient_556_IOE_1,相关的疾病,形如上
+        float[][] predict = {{0.1f, 0.2f, 0.3f, 0.4f}};
+
+        // 构造输入
+        Map<String, Map<String, String>> inputs = new HashMap<>();
+        Map<String, String> featureMap = new HashMap<>();
+        featureMap.put("negative", "有");
+        featureMap.put("property", "11");
+
+        // 构造方式:去查filter_diagnoses.bin文件中与上述疾病相关的一个或多个特征,加入
+        inputs.put("上腹压痛", featureMap);  // 上腹压痛,只与,胃肠炎,相关
+        // 保存输入
+        dataSet.storeFeatureNames(inputs);
+
+        // 过滤疾病
+        Map<String, Float> result = dataSet.wrapAndFilter(predict);
+        Map<String, Float> result_no_filter = dataSet.basicWrap(predict);
+
+        System.out.println("无疾病过滤:" + result_no_filter);  // 期望输出 {鼻炎=0.1, 肺癌=0.2, 胃肠炎=0.3, 屈光不正=0.4}
+        System.out.println("疾病过滤:" + result);  // 期望输出{胃肠炎=0.3}
+
+
+    }
+}

+ 34 - 0
algorithm/src/main/java/org/algorithm/test/TestReSplit.java

@@ -0,0 +1,34 @@
+package org.algorithm.test;
+
+import org.algorithm.core.neural.dataset.NNDataSetImpl;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * 测试再分词
+ * @Author: bijl
+ * @Date: 2019/9/23 10:46
+ * @Description:
+ */
+public class TestReSplit {
+
+    public static void main(String[] args) {
+
+        NNDataSetImpl dataSet = new NNDataSetImpl("diagnosisPredict.version");
+
+        // 构造输入
+        Map<String, Map<String, String>> inputs = new HashMap<>();
+
+        Map<String, String> featureMap = new HashMap<>();
+        featureMap.put("negative", "有");
+        featureMap.put("property", "11");
+
+        inputs.put("幽门螺杆菌感染", featureMap);
+
+        // 对比再分词前后的变化
+        System.out.println("原来数据:" + inputs);
+        dataSet.reSplitWord(inputs);
+        System.out.println("再分词后数据:" + inputs);
+    }
+}

+ 15 - 0
algorithm/src/main/java/org/algorithm/test/TestRelationTreeUtils.java

@@ -0,0 +1,15 @@
+package org.algorithm.test;
+
+import org.algorithm.core.RelationTreeUtils;
+
+/**
+ * @Author: bijl
+ * @Date: 2019/9/5 17:07
+ * @Description:
+ */
+public class TestRelationTreeUtils {
+
+    public static void main(String[] args) {
+        RelationTreeUtils.test();
+    }
+}

File diff suppressed because it is too large
+ 140 - 0
algorithm/src/main/java/org/algorithm/test/TestRuleCheckMachine.java


+ 1 - 1
algorithm/src/main/java/org/algorithm/util/MysqlConnector.java

@@ -45,7 +45,7 @@ public class MysqlConnector {
     
     /**
      * 执行sql语句
-     * @param sql
+     * @param sqls
      */
     public void executeBatch(List<String> sqls) {
         Statement stmt = null;

+ 1 - 1
algorithm/src/main/resources/algorithm.properties

@@ -2,7 +2,7 @@
 
 #basicPath=E:/project/push/algorithm/src/main/models/model_version_replacement/model
 basicPath=/opt/models/dev/models/model_version_replacement/model
-#basicPath=E:/xxx/model_version_replacement/model
+#basicPath=E:/re_models/model_version_replacement/model
 
 ############################### current model version ################################
 diagnosisPredict.version=outpatient_556_IOE_1

+ 6 - 0
bigdata-web/pom.xml

@@ -43,6 +43,12 @@
             <version>1.0.0</version>
         </dependency>
 
+		<dependency>
+			<groupId>org.diagbot</groupId>
+			<artifactId>common-push</artifactId>
+			<version>1.0.0</version>
+		</dependency>
+
         <dependency>
             <groupId>org.diagbot</groupId>
             <artifactId>common-service</artifactId>

+ 0 - 233
bigdata-web/src/main/java/org/diagbot/bigdata/common/ApplicationCacheUtil.java

@@ -1,233 +0,0 @@
-package org.diagbot.bigdata.common;
-
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-import org.diagbot.bigdata.util.BigDataConstants;
-import org.diagbot.nlp.participle.ParticipleUtil;
-import org.diagbot.nlp.participle.cfg.Configuration;
-import org.diagbot.nlp.participle.cfg.DefaultConfig;
-import org.diagbot.nlp.participle.word.Lexeme;
-import org.diagbot.nlp.participle.word.LexemePath;
-import org.diagbot.nlp.util.NegativeEnum;
-import org.diagbot.nlp.util.NlpCache;
-import org.diagbot.pub.utils.security.EncrypDES;
-
-import javax.servlet.ServletContext;
-import java.util.*;
-
-public class ApplicationCacheUtil {
-
-    //词库同义词定义
-    public static Map<String, Map<String, String>> standard_info_synonym_map = null;
-    //词库大小类定义
-    public static Map<String, String> standard_info_classify_map = null;
-    //树形结构存储大小类
-    public static Map<String, NlpCache.Node> standard_info_type_tree_map = null;
-//    体征衍射
-//    public static Map<String, String> doc_result_mapping_vital_map = null;
-    //诊断科室衍射
-    public static Map<String, String> doc_result_mapping_diag_map = null;
-    //特征性别 年龄过滤等
-    public static Map<String, Map<String, ResultMappingFilter>> doc_result_mapping_filter_map = null;
-    //诊断依据标准词
-    public static Map<String, List<Map<String, String>>> kl_result_mapping_standword_map = null;
-
-    public static Map<String, Map<String, String>> getStandard_info_synonym_map() {
-        if (standard_info_synonym_map == null) {
-            standard_info_synonym_map = NlpCache.getStandard_info_synonym_map();
-        }
-        return standard_info_synonym_map;
-    }
-
-    public static Map<String, String> getStandard_info_classify_map() {
-        if (standard_info_classify_map == null) {
-            standard_info_classify_map = NlpCache.getStandard_info_classify_map();
-        }
-        return standard_info_classify_map;
-    }
-
-    public static Map<String, NlpCache.Node> getStandard_info_type_tree_map() {
-        if (standard_info_type_tree_map == null) {
-            standard_info_type_tree_map = NlpCache.getStandard_info_type_tree_map();
-        }
-        return standard_info_type_tree_map;
-    }
-
-//    /**
-//     * 現已無用
-//     * @return
-//     */
-//    public static Map<String, String> getDoc_result_mapping_vital_map() {
-//        if (doc_result_mapping_vital_map == null) {
-//            Configuration configuration = new DefaultConfig();
-//            doc_result_mapping_vital_map = configuration.loadMapDict("doc_result_mapping_vital.dict");
-//        }
-//        return doc_result_mapping_vital_map;
-//    }
-
-    public static Map<String, String> getDoc_result_mapping_diag_map() {
-        if (doc_result_mapping_diag_map == null) {
-            createDoc_result_mapping_diag_map();
-        }
-        return doc_result_mapping_diag_map;
-    }
-
-    public static Map<String, String> createDoc_result_mapping_diag_map() {
-        Configuration configuration = new DefaultConfig();
-        doc_result_mapping_diag_map = configuration.loadMapDict("bigdata_diag_2_dept.dict");
-        return doc_result_mapping_diag_map;
-    }
-
-    public static Map<String, Map<String, ResultMappingFilter>> getDoc_result_mapping_filter_map() {
-        if (doc_result_mapping_filter_map == null) {
-            createDoc_result_mapping_filter_map();
-        }
-        return doc_result_mapping_filter_map;
-    }
-
-    public static Map<String, Map<String, ResultMappingFilter>> createDoc_result_mapping_filter_map() {
-        Configuration configuration = new DefaultConfig();
-        List<String> fileContents = configuration.readFileContents("bigdata_sex_age_filter.dict");
-        String[] line_string;
-        List<ResultMappingFilter> resultMappingFilters = new ArrayList<>();
-        try {
-            for (int i = 0; i < fileContents.size(); i++) {
-                line_string = org.apache.commons.lang3.StringUtils.split(fileContents.get(i), "\\|");
-                if (line_string.length == 5) {
-                    ResultMappingFilter resultMappingFilter = new ResultMappingFilter();
-                    resultMappingFilter.setFeatureName(line_string[0]);
-                    resultMappingFilter.setFeatureType(line_string[1]);
-                    resultMappingFilter.setSex(line_string[2]);
-                    resultMappingFilter.setAgeStart(Integer.parseInt(line_string[3]));
-                    resultMappingFilter.setAgeEnd(Integer.parseInt(line_string[4]));
-                    resultMappingFilters.add(resultMappingFilter);
-                }
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        doc_result_mapping_filter_map = new HashMap<>();
-        Map<String, ResultMappingFilter> filterMap = null;
-        for (ResultMappingFilter resultMappingFilter : resultMappingFilters) {
-            filterMap = doc_result_mapping_filter_map.get(resultMappingFilter.getFeatureType());
-            if (filterMap == null) {
-                filterMap = new HashMap<>();
-            }
-            filterMap.put(resultMappingFilter.getFeatureName(), resultMappingFilter);
-            doc_result_mapping_filter_map.put(resultMappingFilter.getFeatureType(), filterMap);
-        }
-        return doc_result_mapping_filter_map;
-    }
-
-    public static Map<String, List<Map<String, String>>> getKl_result_mapping_standword_map() {
-        if (kl_result_mapping_standword_map == null) {
-            createKl_result_mapping_standword_map();
-        }
-        return kl_result_mapping_standword_map;
-    }
-
-    public static Map<String, List<Map<String, String>>> createKl_result_mapping_standword_map() {
-        kl_result_mapping_standword_map = new HashMap<>();
-        Configuration configuration = new DefaultConfig();
-        List<String> fileContents = configuration.readFileContents("bigdata_value_analyze.dict");
-        List<Map<String, String>> standWordObjValList = null;
-        Map<String, String> standWordObjVal = null;
-        String operation = ">=|≥|>|大于|>|超过|<=|≤|<|小于|<|少于";
-        try {
-            for (String fileContent : fileContents) {
-                LexemePath<Lexeme> lexemes = null;
-                String op = "";
-                String[] fileContentSplit = null;
-                //每一个标准词根据大于小于符号切开,不然进行分词时还是会得到原本的标准词
-                if (fileContent.contains(">") || fileContent.contains("大于")
-                        || fileContent.contains(">") || fileContent.contains("超过")) {
-                    op = ">";
-                    fileContentSplit = fileContent.split(operation);
-                } else if (fileContent.contains("<") || fileContent.contains("小于")
-                        || fileContent.contains("<") || fileContent.contains("少于")) {
-                    op = "<";
-                    fileContentSplit = fileContent.split(operation);
-                } else if (fileContent.contains(">=") || fileContent.contains("≥")){
-                    op = ">=";
-                    fileContentSplit = fileContent.split(operation);
-                } else if (fileContent.contains("<=") || fileContent.contains("≤")) {
-                    op = "<=";
-                    fileContentSplit = fileContent.split(operation);
-                } else {
-                    continue;
-                }
-                LexemePath<Lexeme> lexemeWord = null;
-                //每一个标准词切开后进行分词
-                for (String fileContentWords : fileContentSplit) {
-                    lexemeWord = ParticipleUtil.participle(fileContentWords);
-                    if (lexemeWord != null) {
-                        if (null == lexemes) {
-                            lexemes = lexemeWord;
-                        } else {
-                            for (Lexeme lexeme : lexemeWord) {
-                                lexemes.add(lexeme);
-                            }
-                        }
-                    }
-                }
-                String standWordObjKey = "";
-                standWordObjValList = new ArrayList<>();
-                standWordObjVal = new HashMap<>();
-                int i = 0;
-                for (Lexeme lexeme : lexemes) {
-                    i++;
-                    if (lexeme.getProperty().contains(",")) {
-                        setProterty(lexeme); //如果分词后词性有多个,只选一个(暂时只处理症状,体征)
-                    }
-                    NegativeEnum lexemeNegativeEnum = NegativeEnum.parseOfValue(lexeme.getProperty());
-                    if (lexemeNegativeEnum == NegativeEnum.SYMPTOM || lexemeNegativeEnum == NegativeEnum.CAUSE
-                            || lexemeNegativeEnum == NegativeEnum.VITAL_INDEX
-                            || lexemeNegativeEnum == NegativeEnum.DIAG_STAND) {
-                        if (!kl_result_mapping_standword_map.containsKey(lexeme.getText())) {
-                            kl_result_mapping_standword_map.put(lexeme.getText(), standWordObjValList);
-                        } else {
-                            standWordObjKey = lexeme.getText();
-                        }
-                    }
-                    if (lexemeNegativeEnum == NegativeEnum.DIGITS) {
-                        standWordObjVal.put("value", lexeme.getText());
-                    }
-                    if (lexemeNegativeEnum == NegativeEnum.UNIT
-                            || lexemeNegativeEnum == NegativeEnum.EVENT_TIME
-                            || lexemeNegativeEnum == NegativeEnum.OTHER) {
-                        standWordObjVal.put("unit", lexeme.getText().toLowerCase());
-                    }
-                    if (lexemes.size() == i) {
-                        standWordObjVal.put("op", op);
-                        standWordObjVal.put("standword", fileContent);
-                        if (kl_result_mapping_standword_map.containsKey(standWordObjKey)) {
-                            kl_result_mapping_standword_map.get(standWordObjKey).add(standWordObjVal);
-                        } else {
-                            standWordObjValList.add(standWordObjVal);
-                        }
-                    }
-                }
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-        return kl_result_mapping_standword_map;
-    }
-
-    public static void setProterty(Lexeme lexeme) {
-        for (String featureType : lexeme.getProperty().split(",")) {
-            switch (featureType) {
-                case "1":
-                    lexeme.setProperty("1");
-                    break;
-                case "33":
-                    lexeme.setProperty("33");
-                    break;
-                case "70":
-                    lexeme.setProperty("70");
-                    break;
-
-            }
-        }
-    }
-}

+ 0 - 98
bigdata-web/src/main/java/org/diagbot/bigdata/common/InitListener.java

@@ -1,98 +0,0 @@
-package org.diagbot.bigdata.common;
-
-
-import org.diagbot.bigdata.dao.model.ResultMappingDiag;
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-import org.diagbot.bigdata.dao.model.ResultMappingVital;
-import org.diagbot.bigdata.service.ResultMappingDiagService;
-import org.diagbot.bigdata.service.ResultMappingFilterService;
-import org.diagbot.bigdata.service.ResultMappingVitalService;
-import org.diagbot.bigdata.util.BigDataConstants;
-import org.diagbot.nlp.participle.cfg.Configuration;
-import org.diagbot.nlp.participle.cfg.DefaultConfig;
-import org.springframework.beans.factory.annotation.Autowired;
-
-import javax.servlet.ServletContextEvent;
-import javax.servlet.ServletContextListener;
-import javax.servlet.annotation.WebListener;
-import java.util.*;
-
-@WebListener
-public class InitListener implements ServletContextListener {
-    @Autowired
-    ResultMappingVitalService resultMappingVitalService;
-    @Autowired
-    ResultMappingDiagService resultMappingDiagService;
-    @Autowired
-    ResultMappingFilterService resultMappingFilterService;
-
-    public void contextDestroyed(ServletContextEvent arg0) {
-
-    }
-
-    /**
-     * 开始初始化数据
-     *
-     * @return
-     */
-    public void contextInitialized(ServletContextEvent event) {
-//        contextStandardLibraryInitialized(event);
-//        contextFeatureMappingInitialized(event);
-//        contextResultMappingDiagInitialized(event);
-//        contextResultMappingFilterInitialized(event);
-    }
-
-//    public void contextStandardLibraryInitialized(ServletContextEvent event) {
-//        ApplicationCacheUtil applicationCacheUtil = new ApplicationCacheUtil();
-//        applicationCacheUtil.putStandardInfoContext(event.getServletContext());
-//    }
-
-//    public void contextFeatureMappingInitialized(ServletContextEvent event) {
-//        Configuration configuration = new DefaultConfig();
-//        Map<String, String> resultMappingVitals = configuration.loadMapDict("tc.dict");
-////        List<ResultMappingVital> resultMappingVitals = resultMappingVitalService.selectList(new HashMap<>());
-//        Map<String, String> mapping = new HashMap<>();
-//        for (ResultMappingVital resultMappingVital : resultMappingVitals) {
-//            mapping.put(resultMappingVital.getName(), resultMappingVital.getNameMapping());
-//        }
-//        event.getServletContext().setAttribute(BigDataConstants.result_mapping_vital, mapping);
-//    }
-//
-//    public void contextResultMappingDiagInitialized(ServletContextEvent event) {
-//        List<ResultMappingDiag> resultMappingDiags = resultMappingDiagService.selectList(new HashMap<>());
-//
-//        Map<String, String> mapping = new HashMap<>();
-//        for (ResultMappingDiag resultMappingDiag : resultMappingDiags) {
-//            mapping.put(resultMappingDiag.getDiagName(), resultMappingDiag.getDeptName());
-//        }
-//        event.getServletContext().setAttribute(BigDataConstants.result_mapping_diag, mapping);
-//    }
-//
-//    public void contextResultMappingFilterInitialized(ServletContextEvent event) {
-//        List<ResultMappingFilter> resultMappingFilters = resultMappingFilterService.selectList(new HashMap<>());
-//
-//        Map<String, Map<String, ResultMappingFilter>> mapping = new HashMap<>();
-//        Map<String, ResultMappingFilter> filterMap = null;
-//        for (ResultMappingFilter resultMappingFilter : resultMappingFilters) {
-//            filterMap = mapping.get(resultMappingFilter.getFeatureType());
-//            if (filterMap == null) {
-//                filterMap = new HashMap<>();
-//            }
-//            filterMap.put(resultMappingFilter.getFeatureName(), resultMappingFilter);
-//            mapping.put(resultMappingFilter.getFeatureType(), filterMap);
-//        }
-//        event.getServletContext().setAttribute(BigDataConstants.result_mapping_filter, mapping);
-//    }
-
-    private void put(Map<String, List<String>> map, String key, List<String> value, String ele) {
-        if (value == null) {
-            value = new ArrayList<>(Arrays.asList(ele));
-            map.put(key, value);
-        } else {
-            if (!value.contains(ele)) {
-                value.add(ele);
-                map.put(key, value);
-            }
-        }
-    }
-}

+ 3 - 3
bigdata-web/src/main/java/org/diagbot/bigdata/controller/AlgorithmController.java

@@ -1,8 +1,8 @@
 package org.diagbot.bigdata.controller;
 
 import org.diagbot.bigdata.work.AlgorithmCore;
-import org.diagbot.common.work.ResponseData;
-import org.diagbot.common.work.SearchData;
+import org.diagbot.common.push.bean.ResponseData;
+import org.diagbot.common.push.bean.SearchData;
 import org.diagbot.nlp.util.Constants;
 import org.diagbot.pub.api.Response;
 import org.diagbot.pub.web.BaseController;
@@ -34,7 +34,7 @@ public class AlgorithmController extends BaseController {
     public Response<ResponseData> algorithm(HttpServletRequest request, SearchData searchData) throws Exception {
         Response<ResponseData> response = new Response();
         AlgorithmCore core = new AlgorithmCore();
-        ResponseData responseData = core.algorithm(request, searchData);
+        ResponseData responseData = core.algorithm(request, searchData, null);
         response.setData(responseData);
         return response;
     }

+ 0 - 22
bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingDiagMapper.java

@@ -1,22 +0,0 @@
-/** 
-* @Company: 杭州朗通信息技术有限公司
-* @Department: 医疗事业部
-* @Description: 互动反馈系统 
-* @Address: 浙江省杭州市余杭区向往街1008号乐富海邦园11幢4楼
-*/
-package org.diagbot.bigdata.dao.mapper;
-
-import org.diagbot.bigdata.dao.model.ResultMappingDiag;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingDiagWrapper;
-import org.diagbot.pub.orm.EntityMapper;
-
-/**
-* @Title: Feature.java
-* @Package: com.zjlantone.nlp.web.doc.dao.model
-* @Description: 数据库操作接口类 
-* @author: 楼辉荣
-* @date: 2016年8月8日 下午17:16:23
-* @version: V1.0
-*/
-public interface ResultMappingDiagMapper extends EntityMapper<ResultMappingDiag, ResultMappingDiagWrapper, Long> {
-}

+ 0 - 22
bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingFilterMapper.java

@@ -1,22 +0,0 @@
-/** 
-* @Company: 杭州朗通信息技术有限公司
-* @Department: 医疗事业部
-* @Description: 互动反馈系统 
-* @Address: 浙江省杭州市余杭区向往街1008号乐富海邦园11幢4楼
-*/
-package org.diagbot.bigdata.dao.mapper;
-
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingFilterWrapper;
-import org.diagbot.pub.orm.EntityMapper;
-
-/**
-* @Title: Feature.java
-* @Package: com.zjlantone.nlp.web.doc.dao.model
-* @Description: 数据库操作接口类 
-* @author: 楼辉荣
-* @date: 2016年8月8日 下午17:16:23
-* @version: V1.0
-*/
-public interface ResultMappingFilterMapper extends EntityMapper<ResultMappingFilter, ResultMappingFilterWrapper, Long> {
-}

+ 0 - 22
bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingVitalMapper.java

@@ -1,22 +0,0 @@
-/** 
-* @Company: 杭州朗通信息技术有限公司
-* @Department: 医疗事业部
-* @Description: 互动反馈系统 
-* @Address: 浙江省杭州市余杭区向往街1008号乐富海邦园11幢4楼
-*/
-package org.diagbot.bigdata.dao.mapper;
-
-import org.diagbot.bigdata.dao.model.ResultMappingVital;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingVitalWrapper;
-import org.diagbot.pub.orm.EntityMapper;
-
-/**
-* @Title: Feature.java
-* @Package: com.zjlantone.nlp.web.doc.dao.model
-* @Description: 数据库操作接口类 
-* @author: 楼辉荣
-* @date: 2016年8月8日 下午17:16:23
-* @version: V1.0
-*/
-public interface ResultMappingVitalMapper extends EntityMapper<ResultMappingVital, ResultMappingVitalWrapper, Long> {
-}

+ 0 - 38
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingDiag.java

@@ -1,38 +0,0 @@
-package org.diagbot.bigdata.dao.model;
-
-import java.io.Serializable;
-
-/**
- * @Auther: fyeman
- * @Date: 2018/9/12/012 16:50
- * @Description:
- */
-public class ResultMappingDiag implements Serializable {
-    private Long id;
-    private String diagName;
-    private String deptName;
-
-    public Long getId() {
-        return id;
-    }
-
-    public void setId(Long id) {
-        this.id = id;
-    }
-
-    public String getDiagName() {
-        return diagName;
-    }
-
-    public void setDiagName(String diagName) {
-        this.diagName = diagName;
-    }
-
-    public String getDeptName() {
-        return deptName;
-    }
-
-    public void setDeptName(String deptName) {
-        this.deptName = deptName;
-    }
-}

+ 0 - 38
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingVital.java

@@ -1,38 +0,0 @@
-package org.diagbot.bigdata.dao.model;
-
-import java.io.Serializable;
-
-/**
- * @Auther: fyeman
- * @Date: 2018/9/10/010 14:30
- * @Description:
- */
-public class ResultMappingVital implements Serializable {
-    private Long id;
-    private String name;
-    private String nameMapping;
-
-    public Long getId() {
-        return id;
-    }
-
-    public void setId(Long id) {
-        this.id = id;
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public void setName(String name) {
-        this.name = name;
-    }
-
-    public String getNameMapping() {
-        return nameMapping;
-    }
-
-    public void setNameMapping(String nameMapping) {
-        this.nameMapping = nameMapping;
-    }
-}

+ 0 - 12
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingDiagWrapper.java

@@ -1,12 +0,0 @@
-package org.diagbot.bigdata.dao.model.wrapper;
-
-
-import org.diagbot.bigdata.dao.model.ResultMappingDiag;
-
-/**
- * @Auther: fyeman
- * @Date: 2018/9/12/012 16:51
- * @Description:
- */
-public class ResultMappingDiagWrapper extends ResultMappingDiag {
-}

+ 0 - 6
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingFilterWrapper.java

@@ -1,6 +0,0 @@
-package org.diagbot.bigdata.dao.model.wrapper;
-
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-
-public class ResultMappingFilterWrapper extends ResultMappingFilter {
-}

+ 0 - 7
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingVitalWrapper.java

@@ -1,7 +0,0 @@
-package org.diagbot.bigdata.dao.model.wrapper;
-
-
-import org.diagbot.bigdata.dao.model.ResultMappingVital;
-
-public class ResultMappingVitalWrapper extends ResultMappingVital {
-}

+ 0 - 55
bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingDiagMapper.xml

@@ -1,55 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
-        "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
-<mapper namespace="org.diagbot.bigdata.dao.mapper.ResultMappingDiagMapper">
-    <!-- 映射定义列-->
-    <resultMap type="org.diagbot.bigdata.dao.model.ResultMappingDiag" id="resultMappingDiagMap">
-        <id property="id" column="id"/>
-        <result property="diagName" column="diag_name"/>
-        <result property="deptName" column="dept_name"/>
-    </resultMap>
-
-    <!-- 映射定义列-->
-    <resultMap type="org.diagbot.bigdata.dao.model.wrapper.ResultMappingDiagWrapper" id="resultMappingDiagWrapperMap">
-        <id property="id" column="id"/>
-        <result property="diagName" column="diag_name"/>
-        <result property="deptName" column="dept_name"/>
-    </resultMap>
-
-    <!-- 通用查询结果列-->
-    <sql id="Base_Column_List">
-		 t.id,	 t.diag_name,	 t.dept_name
-	</sql>
-
-    <!-- 查询(根据主键ID查询) -->
-    <select id="selectByPrimaryKey" resultMap="resultMappingDiagMap" parameterType="java.lang.Integer">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_diag t
-        WHERE t.id = #{id}
-    </select>
-
-    <!-- 查询(根据主键ID查询) -->
-    <select id="selectWrapperByPrimaryKey" resultMap="resultMappingDiagWrapperMap" parameterType="java.lang.Integer">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_diag t
-        WHERE t.id = #{id}
-    </select>
-
-    <!-- 依据Map查询条件返回结果集-->
-    <select id="selectList" resultMap="resultMappingDiagMap" parameterType="java.util.Map">
-        select diag_name, group_concat(dept_name) dept_name from doc_result_mapping_diag  group by diag_name
-    </select>
-
-    <!-- 依据Map查询条件返回扩展属性结果集-->
-    <select id="selectListWrapper" resultMap="resultMappingDiagWrapperMap" parameterType="java.util.Map">
-        select diag_name, group_concat(dept_name) dept_name from doc_result_mapping_diag  group by diag_name
-    </select>
-
-    <!--删除:根据主键ID删除-->
-    <delete id="deleteByPrimaryKey" parameterType="java.lang.Integer">
-		 DELETE FROM doc_result_mapping_diag
-		 WHERE id = #{id}
-	</delete>
-</mapper>

+ 0 - 67
bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingFilterMapper.xml

@@ -1,67 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
-        "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
-<mapper namespace="org.diagbot.bigdata.dao.mapper.ResultMappingFilterMapper">
-    <!-- 映射定义列-->
-    <resultMap type="org.diagbot.bigdata.dao.model.ResultMappingFilter" id="resultMappingFilterMap">
-        <id property="id" column="id"/>
-        <result property="featureName" column="feature_name"/>
-        <result property="featureType" column="feature_type"/>
-        <result property="sex" column="sex"/>
-        <result property="ageStart" column="age_start"/>
-        <result property="ageEnd" column="age_end"/>
-        <result property="remark" column="remark"/>
-    </resultMap>
-
-    <!-- 映射定义列-->
-    <resultMap type="org.diagbot.bigdata.dao.model.wrapper.ResultMappingFilterWrapper" id="resultMappingFilterWrapperMap">
-        <id property="id" column="id"/>
-        <result property="featureName" column="feature_name"/>
-        <result property="featureType" column="feature_type"/>
-        <result property="sex" column="sex"/>
-        <result property="ageStart" column="age_start"/>
-        <result property="ageEnd" column="age_end"/>
-        <result property="remark" column="remark"/>
-    </resultMap>
-
-    <!-- 通用查询结果列-->
-    <sql id="Base_Column_List">
-		 t.id, t.feature_name, t.feature_type, t.sex, t.age_start, t.age_end, t.remark
-	</sql>
-
-    <!-- 查询(根据主键ID查询) -->
-    <select id="selectByPrimaryKey" resultMap="resultMappingFilterMap" parameterType="java.lang.Integer">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_filter t
-        WHERE t.id = #{id}
-    </select>
-
-    <!-- 查询(根据主键ID查询) -->
-    <select id="selectWrapperByPrimaryKey" resultMap="resultMappingFilterWrapperMap" parameterType="java.lang.Integer">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_filter t
-        WHERE t.id = #{id}
-    </select>
-
-    <!-- 依据Map查询条件返回结果集-->
-    <select id="selectList" resultMap="resultMappingFilterMap" parameterType="java.util.Map">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_filter t WHERE 1=1
-    </select>
-
-    <!-- 依据Map查询条件返回扩展属性结果集-->
-    <select id="selectListWrapper" resultMap="resultMappingFilterWrapperMap" parameterType="java.util.Map">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_filter t WHERE 1=1
-    </select>
-
-    <!--删除:根据主键ID删除-->
-    <delete id="deleteByPrimaryKey" parameterType="java.lang.Integer">
-		 DELETE FROM doc_result_mapping_filter
-		 WHERE id = #{id}
-	</delete>
-</mapper>

+ 0 - 77
bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingVitalMapper.xml

@@ -1,77 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
-        "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
-<mapper namespace="org.diagbot.bigdata.dao.mapper.ResultMappingVitalMapper">
-    <!-- 映射定义列-->
-    <resultMap type="org.diagbot.bigdata.dao.model.ResultMappingVital" id="resultMappingVitalMap">
-        <id property="id" column="id"/>
-        <result property="name" column="name"/>
-        <result property="nameMapping" column="name_mapping"/>
-    </resultMap>
-
-    <!-- 映射定义列-->
-    <resultMap type="org.diagbot.bigdata.dao.model.wrapper.ResultMappingVitalWrapper" id="resultMappingVitalWrapperMap">
-        <id property="id" column="id"/>
-        <result property="name" column="name"/>
-        <result property="nameMapping" column="name_mapping"/>
-    </resultMap>
-
-    <!-- 通用查询结果列-->
-    <sql id="Base_Column_List">
-		 t.id,	 t.name,	 t.name_mapping
-	</sql>
-
-    <!-- 查询(根据主键ID查询) -->
-    <select id="selectByPrimaryKey" resultMap="resultMappingVitalMap" parameterType="java.lang.Integer">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_vital t
-        WHERE t.id = #{id}
-    </select>
-
-    <!-- 查询(根据主键ID查询) -->
-    <select id="selectWrapperByPrimaryKey" resultMap="resultMappingVitalWrapperMap" parameterType="java.lang.Integer">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_vital t
-        WHERE t.id = #{id}
-    </select>
-
-    <!-- 依据Map查询条件返回结果集-->
-    <select id="selectList" resultMap="resultMappingVitalMap" parameterType="java.util.Map">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_vital t WHERE 1=1
-        <if test="id != null and id != ''">
-            and t.id = #{id}
-        </if>
-        <if test="name != null and name != ''">
-            and t.name = #{name}
-        </if>
-        <if test="nameMapping != null and nameMapping != ''">
-            and t.name_mapping = #{nameMapping}
-        </if>
-    </select>
-
-    <!-- 依据Map查询条件返回扩展属性结果集-->
-    <select id="selectListWrapper" resultMap="resultMappingVitalWrapperMap" parameterType="java.util.Map">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_vital t WHERE 1=1
-        <if test="id != null and id != ''">
-            and t.id = #{id}
-        </if>
-        <if test="name != null and name != ''">
-            and t.name = #{name}
-        </if>
-        <if test="nameMapping != null and nameMapping != ''">
-            and t.name_mapping = #{nameMapping}
-        </if>
-    </select>
-
-    <!--删除:根据主键ID删除-->
-    <delete id="deleteByPrimaryKey" parameterType="java.lang.Integer">
-		 DELETE FROM doc_result_mapping_vital
-		 WHERE id = #{id}
-	</delete>
-</mapper>

+ 0 - 8
bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingDiagService.java

@@ -1,8 +0,0 @@
-package org.diagbot.bigdata.service;
-
-import org.diagbot.bigdata.dao.model.ResultMappingDiag;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingDiagWrapper;
-import org.diagbot.pub.service.BaseService;
-
-public interface ResultMappingDiagService extends BaseService<ResultMappingDiag, ResultMappingDiagWrapper, Long> {
-}

+ 0 - 8
bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingFilterService.java

@@ -1,8 +0,0 @@
-package org.diagbot.bigdata.service;
-
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingFilterWrapper;
-import org.diagbot.pub.service.BaseService;
-
-public interface ResultMappingFilterService extends BaseService<ResultMappingFilter, ResultMappingFilterWrapper, Long> {
-}

+ 0 - 8
bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingVitalService.java

@@ -1,8 +0,0 @@
-package org.diagbot.bigdata.service;
-
-import org.diagbot.bigdata.dao.model.ResultMappingVital;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingVitalWrapper;
-import org.diagbot.pub.service.BaseService;
-
-public interface ResultMappingVitalService extends BaseService<ResultMappingVital, ResultMappingVitalWrapper, Long> {
-}

+ 0 - 21
bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingDiagServiceImpl.java

@@ -1,21 +0,0 @@
-package org.diagbot.bigdata.service.impl;
-
-import org.diagbot.bigdata.dao.mapper.ResultMappingDiagMapper;
-import org.diagbot.bigdata.dao.model.ResultMappingDiag;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingDiagWrapper;
-import org.diagbot.bigdata.service.ResultMappingDiagService;
-import org.diagbot.pub.service.BaseServiceImpl;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-@Service
-public class ResultMappingDiagServiceImpl extends BaseServiceImpl<ResultMappingDiag, ResultMappingDiagWrapper, Long> implements ResultMappingDiagService {
-    @Autowired
-    ResultMappingDiagMapper resultMappingDiagMapper;
-
-    @Autowired
-    private void setEntityMapper() {
-        super.setEntityMapper(resultMappingDiagMapper);
-    }
-
-}

+ 0 - 21
bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingFilterServiceImpl.java

@@ -1,21 +0,0 @@
-package org.diagbot.bigdata.service.impl;
-
-import org.diagbot.bigdata.dao.mapper.ResultMappingFilterMapper;
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingFilterWrapper;
-import org.diagbot.bigdata.service.ResultMappingFilterService;
-import org.diagbot.pub.service.BaseServiceImpl;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-@Service
-public class ResultMappingFilterServiceImpl extends BaseServiceImpl<ResultMappingFilter, ResultMappingFilterWrapper, Long> implements ResultMappingFilterService {
-    @Autowired
-    ResultMappingFilterMapper resultMappingFilterMapper;
-
-    @Autowired
-    private void setEntityMapper() {
-        super.setEntityMapper(resultMappingFilterMapper);
-    }
-
-}

+ 0 - 21
bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingVitalServiceImpl.java

@@ -1,21 +0,0 @@
-package org.diagbot.bigdata.service.impl;
-
-import org.diagbot.bigdata.dao.mapper.ResultMappingVitalMapper;
-import org.diagbot.bigdata.dao.model.ResultMappingVital;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingVitalWrapper;
-import org.diagbot.bigdata.service.ResultMappingVitalService;
-import org.diagbot.pub.service.BaseServiceImpl;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-@Service
-public class ResultMappingVitalServiceImpl extends BaseServiceImpl<ResultMappingVital, ResultMappingVitalWrapper, Long> implements ResultMappingVitalService {
-    @Autowired
-    ResultMappingVitalMapper resultMappingVitalMapper;
-
-    @Autowired
-    private void setEntityMapper() {
-        super.setEntityMapper(resultMappingVitalMapper);
-    }
-
-}

+ 6 - 9
bigdata-web/src/main/java/org/diagbot/bigdata/work/AlgorithmCore.java

@@ -3,9 +3,9 @@ package org.diagbot.bigdata.work;
 import org.algorithm.core.AlgorithmExecutor;
 import org.algorithm.factory.AlgorithmFactory;
 import org.algorithm.util.AlgorithmClassify;
-import org.diagbot.common.work.FeatureRate;
-import org.diagbot.common.work.ResponseData;
-import org.diagbot.common.work.SearchData;
+import org.diagbot.common.push.bean.FeatureRate;
+import org.diagbot.common.push.bean.ResponseData;
+import org.diagbot.common.push.bean.SearchData;
 import org.diagbot.nlp.feature.FeatureType;
 import org.diagbot.nlp.util.Constants;
 import org.diagbot.nlp.util.NlpCache;
@@ -25,12 +25,11 @@ import java.util.*;
  **/
 public class AlgorithmCore {
     Logger logger = LoggerFactory.getLogger(AlgorithmCore.class);
-    public ResponseData algorithm(HttpServletRequest request, SearchData searchData) throws Exception {
-        ResponseData responseData = new ResponseData();
+    public ResponseData algorithm(HttpServletRequest request, SearchData searchData, ResponseData responseData) throws Exception {
         //录入文本处理,包括提取特征、推送类型转换等
-        ParamsDataProxy paramsDataProxy = new ParamsDataProxy();
+        BigDataParamsProxy paramsDataProxy = new BigDataParamsProxy();
         logger.info("页面文本信息:" + searchData.getSymptom());
-        paramsDataProxy.createSearchData(request, searchData);
+        paramsDataProxy.createSearchData(searchData);
         //对象拷贝至BigDataSearchData处理
         BigDataSearchData bigDataSearchData = new BigDataSearchData();
         BeanUtils.copyProperties(searchData, bigDataSearchData);
@@ -63,8 +62,6 @@ public class AlgorithmCore {
                 if (Constants.feature_type_symptom.equals(searchData.getFeatureTypes()[i])) {
                     featuresMap = resultDataProxy.mapAdd(featuresMap, NlpCache.getStandard_info_push_map(), true);
                 }
-                //大小类合并
-                featuresMap = resultDataProxy.resultMerge(request, featuresMap);
                 //按模型计算的概率排序
                 featuresOrderList = new ArrayList<Map.Entry<String, Float>>(featuresMap.entrySet());
                 Collections.sort(featuresOrderList, new Comparator<Map.Entry<String, Float>>() {

+ 94 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/work/BigDataParamsProxy.java

@@ -0,0 +1,94 @@
+package org.diagbot.bigdata.work;
+
+import org.algorithm.util.AlgorithmClassify;
+import org.apache.commons.lang3.StringUtils;
+import org.diagbot.common.push.bean.SearchData;
+import org.diagbot.common.push.work.ParamsDataProxy;
+import org.diagbot.nlp.feature.FeatureType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @ClassName org.diagbot.bigdata.work.ParamsDataProxy
+ * @Description TODO
+ * @Author fyeman
+ * @Date 2019/1/16/016 14:04
+ * @Version 1.0
+ **/
+public class BigDataParamsProxy {
+    Logger logger = LoggerFactory.getLogger(BigDataParamsProxy.class);
+
+    public void createSearchData(SearchData searchData) throws Exception {
+        ParamsDataProxy paramsDataProxy = new ParamsDataProxy();
+        paramsDataProxy.createSearchData(searchData);
+    }
+    /**
+     * featureType转算法模型类型
+     *
+     * @param sysCode
+     * @param featureTypes
+     * @param searchData
+     */
+    public AlgorithmClassify[] createAlgorithmClassify(String sysCode, String[] featureTypes, SearchData searchData) {
+        AlgorithmClassify[] classifies = new AlgorithmClassify[featureTypes.length];
+        //下了诊断且其他信息全为空 反推标识
+        boolean reverse = !StringUtils.isEmpty(searchData.getDiag()) && StringUtils.isEmpty(searchData.getSymptom());
+        for (int i = 0; i < featureTypes.length; i++) {
+            if (featureTypes[i] != null) {
+                //模型
+                switch (FeatureType.parse(featureTypes[i])) {
+                    case SYMPTOM:
+                        if (reverse) {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_SYMPTOM;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_SYMPTOM;
+                        }
+                        break;
+                    case DIAG:
+                        if (reverse) {
+                            classifies[i] = null;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG;
+                        }
+                        break;
+                    case VITAL:
+                        if (reverse) {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_VITAL;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_VITAL;
+                        }
+                        break;
+                    case LIS:
+                        if (reverse) {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_LIS;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_LIS;
+                        }
+                        break;
+                    case PACS:
+                        if (reverse) {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_PACS;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_PACS;
+                        }
+                        break;
+                    case TREAT:
+                        if (reverse) {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_TREAT;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_TREAT;
+                        }
+                        break;
+                    case HISTORY:
+                        if (reverse) {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_HISTORY;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_HISTORY;
+                        }
+                        break;
+                }
+            }
+        }
+        return classifies;
+    }
+}

+ 1 - 1
bigdata-web/src/main/java/org/diagbot/bigdata/work/BigDataSearchData.java

@@ -1,7 +1,7 @@
 package org.diagbot.bigdata.work;
 
 import org.algorithm.util.AlgorithmClassify;
-import org.diagbot.common.work.SearchData;
+import org.diagbot.common.push.bean.SearchData;
 
 public class BigDataSearchData extends SearchData {
     //模型

+ 0 - 702
bigdata-web/src/main/java/org/diagbot/bigdata/work/ParamsDataProxy.java

@@ -1,702 +0,0 @@
-package org.diagbot.bigdata.work;
-
-import org.algorithm.util.AlgorithmClassify;
-import org.apache.commons.lang3.StringUtils;
-import org.diagbot.bigdata.common.ApplicationCacheUtil;
-import org.diagbot.bigdata.util.BigDataConstants;
-import org.diagbot.common.work.SearchData;
-import org.diagbot.nlp.feature.FeatureAnalyze;
-import org.diagbot.nlp.feature.FeatureType;
-import org.diagbot.nlp.participle.ParticipleUtil;
-import org.diagbot.nlp.participle.word.Lexeme;
-import org.diagbot.nlp.participle.word.LexemePath;
-import org.diagbot.nlp.util.Constants;
-import org.diagbot.nlp.util.NegativeEnum;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.servlet.http.HttpServletRequest;
-import java.util.*;
-import java.util.regex.Pattern;
-
-/**
- * @ClassName org.diagbot.bigdata.work.ParamsDataProxy
- * @Description TODO
- * @Author fyeman
- * @Date 2019/1/16/016 14:04
- * @Version 1.0
- **/
-public class ParamsDataProxy {
-    Logger logger = LoggerFactory.getLogger(ParamsDataProxy.class);
-    //标准词只处理的词性
-    public static NegativeEnum[] negativeEnums = new NegativeEnum[] { NegativeEnum.VITAL_INDEX, NegativeEnum.SYMPTOM
-            , NegativeEnum.DIGITS, NegativeEnum.EVENT_TIME, NegativeEnum.UNIT, NegativeEnum.DIAG_STAND
-            , NegativeEnum.OTHER};
-    //标准词处理的三元组
-    public static NegativeEnum[][] negativeEnumTriple = {
-            { NegativeEnum.VITAL_INDEX, NegativeEnum.DIGITS, NegativeEnum.UNIT },
-            { NegativeEnum.VITAL_INDEX, NegativeEnum.DIGITS, NegativeEnum.EVENT_TIME },
-            { NegativeEnum.SYMPTOM, NegativeEnum.DIGITS, NegativeEnum.UNIT },
-            { NegativeEnum.SYMPTOM, NegativeEnum.DIGITS, NegativeEnum.EVENT_TIME },
-            { NegativeEnum.DIAG_STAND, NegativeEnum.DIGITS, NegativeEnum.UNIT },
-            { NegativeEnum.DIAG_STAND, NegativeEnum.DIGITS, NegativeEnum.EVENT_TIME },
-            { NegativeEnum.DIAG_STAND, NegativeEnum.DIGITS, NegativeEnum.OTHER }
-    };
-    //标准词处理的二元组
-    public static NegativeEnum[][] negativeEnumTwoTuple = {
-            { NegativeEnum.VITAL_INDEX, NegativeEnum.DIGITS },
-            { NegativeEnum.SYMPTOM, NegativeEnum.DIGITS }
-    };
-
-    public void createSearchData(HttpServletRequest request, SearchData searchData) throws Exception {
-        //消除空格
-        if (searchData.getSymptom() != null) {
-            searchData.setSymptom(searchData.getSymptom().trim());
-        }
-        if (searchData.getDiag() != null) {
-            searchData.setDiag(searchData.getDiag().trim());
-        }
-        //计算年龄区间
-        if (searchData.getAge() > 0) {
-            searchData.setAge_start(searchData.getAge() - 5);
-            searchData.setAge_end(searchData.getAge() + 5);
-        }
-        //修改性别代码
-        if (!StringUtils.isEmpty(searchData.getSex())) {
-            if ("M".equals(searchData.getSex())) {
-                searchData.setSex("1");
-            } else if ("F".equals(searchData.getSex())) {
-                searchData.setSex("2");
-            } else {
-                searchData.setSex("3");
-            }
-        } else {
-            searchData.setSex("3");
-        }
-        //默认查询门诊数据
-        if (StringUtils.isEmpty(searchData.getResourceType())) {
-            searchData.setResourceType(BigDataConstants.resource_type_o);
-        }
-        //给症状末尾添加诊断依据标准词
-        String[] items = { searchData.getSymptom(), searchData.getOther(), searchData.getVital()
-                , searchData.getLis(), searchData.getPacs(), searchData.getDiag() };
-        String[] itemsType = { "symptom", "other", "vital", "lis", "pacs", "diag" };
-        for (int i = 0; i < items.length; i++) {
-            if (items[i] != null) {
-                LexemePath<Lexeme> featureData = ParticipleUtil.participle(items[i]);
-                if (featureData != null) {
-                    addStandWord(featureData, ApplicationCacheUtil.getKl_result_mapping_standword_map(), searchData, itemsType[i]);
-                }
-            }
-        }
-        //所有信息参与推送
-        //        searchData.setSymptom(searchData.getSymptom() + searchData.getVital()
-        //                + searchData.getLis() + searchData.getPacs() + searchData.getPast() + searchData.getOther() + searchData.getIndications());
-        if (StringUtils.isNotEmpty(searchData.getSymptom())) {
-            searchData.setSymptom(searchData.getSymptom().trim());
-        }
-        //一次推送多个类别信息
-        String[] featureTypes = searchData.getFeatureType().split(",");
-        //featureType统一转换
-        String[] convertFeatureTypes = new String[featureTypes.length];
-        for (int i = 0; i < featureTypes.length; i++) {
-            convertFeatureTypes[i] = convertFeatureType(searchData.getSysCode(), featureTypes[i]);
-        }
-        searchData.setFeatureType(StringUtils.join(convertFeatureTypes, ","));
-        searchData.setFeatureTypes(convertFeatureTypes);
-
-        //获取入参中的特征信息
-        FeatureAnalyze fa = new FeatureAnalyze();
-        List<Map<String, Object>> featuresList = new ArrayList<>();
-        if (!StringUtils.isEmpty(searchData.getSymptom())) {
-            //提取现病史
-            featuresList = fa.start(searchData.getSymptom(), FeatureType.FEATURE);
-            paramFeatureInit(searchData, featuresList);
-            //提取时间信息
-            featuresList = fa.start(searchData.getSymptom(), FeatureType.TIME);
-            paramFeatureInit(searchData, featuresList);
-        }
-        if (!StringUtils.isEmpty(searchData.getVital())) {
-            //提取体征
-            featuresList = fa.start(searchData.getVital(), FeatureType.FEATURE);
-            paramFeatureInit(searchData, featuresList);
-        }
-        if (!StringUtils.isEmpty(searchData.getPast())) {
-            //提取既往史
-            featuresList = fa.start(searchData.getPast(), FeatureType.FEATURE);
-            paramFeatureInit(searchData, featuresList);
-        }
-        if (!StringUtils.isEmpty(searchData.getOther()) || !StringUtils.isEmpty(searchData.getIndications())) {
-            //提取其他史等
-            featuresList = fa.start((searchData.getOther() == null ? "" : searchData.getOther()) + (searchData.getIndications() == null ? "" : searchData.getIndications()), FeatureType.FEATURE);
-            paramFeatureInit(searchData, featuresList);
-        }
-        if (!StringUtils.isEmpty(searchData.getPacs())) {
-            featuresList = fa.start(searchData.getPacs(), FeatureType.PACS);
-            paramFeatureInit(searchData, featuresList);
-        }
-        if (!StringUtils.isEmpty(searchData.getLis())) {
-            featuresList = fa.start(searchData.getLis(), FeatureType.LIS);
-            paramFeatureInit(searchData, featuresList);
-        }
-        // 清洗特征词,去除词性不匹配的词
-        searchData = cleanFeature(featuresList, fa, searchData);
-        if (!StringUtils.isEmpty(searchData.getOther())) {
-            //如果既往史中诊断信息,需要提取这个特征
-            featuresList = fa.start(searchData.getOther(), FeatureType.DIAG);
-            paramFeatureInit(searchData, featuresList);
-        }
-
-        if (!StringUtils.isEmpty(searchData.getDiag()) && StringUtils.isEmpty(searchData.getSymptom())) {
-            featuresList = fa.start(searchData.getDiag(), FeatureType.DIAG);
-            paramFeatureInit(searchData, featuresList);
-        }
-    }
-
-    /**
-     * featureType转算法模型类型
-     *
-     * @param sysCode
-     * @param featureTypes
-     * @param searchData
-     */
-    public AlgorithmClassify[] createAlgorithmClassify(String sysCode, String[] featureTypes, SearchData searchData) {
-        AlgorithmClassify[] classifies = new AlgorithmClassify[featureTypes.length];
-        //下了诊断且其他信息全为空 反推标识
-        boolean reverse = !StringUtils.isEmpty(searchData.getDiag()) && StringUtils.isEmpty(searchData.getSymptom());
-        for (int i = 0; i < featureTypes.length; i++) {
-            //            featureTypes[i] = convertFeatureType(sysCode, featureTypes[i]);
-            if (featureTypes[i] != null) {
-                //模型
-                switch (FeatureType.parse(featureTypes[i])) {
-                    case SYMPTOM:
-                        if (reverse) {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_SYMPTOM;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_SYMPTOM;
-                        }
-                        break;
-                    case DIAG:
-                        if (reverse) {
-                            classifies[i] = null;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG;
-                        }
-                        break;
-                    case VITAL:
-                        if (reverse) {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_VITAL;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_VITAL;
-                        }
-                        break;
-                    case LIS:
-                        if (reverse) {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_LIS;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_LIS;
-                        }
-                        break;
-                    case PACS:
-                        if (reverse) {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_PACS;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_PACS;
-                        }
-                        break;
-                    case TREAT:
-                        if (reverse) {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_TREAT;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_TREAT;
-                        }
-                        break;
-                    case HISTORY:
-                        if (reverse) {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_HISTORY;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_HISTORY;
-                        }
-                        break;
-                }
-            }
-        }
-        return classifies;
-    }
-
-    /**
-     * 外部系统featureType需要转化为大数据定义的featureType
-     *
-     * @param sysCode
-     * @param featureType
-     * @return
-     */
-    private String convertFeatureType(String sysCode, String featureType) {
-        if (StringUtils.isEmpty(sysCode) || sysCode.equals("1")) {
-            if ("1".equals(featureType)) {
-                return BigDataConstants.feature_type_symptom;
-            }
-            if ("7".equals(featureType)) {
-                return BigDataConstants.feature_type_diag;
-            }
-            if ("4".equals(featureType)) {
-                return BigDataConstants.feature_type_vital;
-            }
-            if ("5".equals(featureType)) {
-                return BigDataConstants.feature_type_lis;
-            }
-            if ("6".equals(featureType)) {
-                return BigDataConstants.feature_type_pacs;
-            }
-            if ("3".equals(featureType)) {
-                return BigDataConstants.feature_type_history;
-            }
-            if ("8".equals(featureType)) {
-                return BigDataConstants.feature_type_treat;
-            }
-            if ("22".equals(featureType)) {
-                return BigDataConstants.feature_type_labelpush;
-            }
-            if ("11".equals(featureType)) {
-                return BigDataConstants.feature_type_manju;
-            }
-            if ("42".equals(featureType)) {
-                return BigDataConstants.feature_type_vital_index;
-            }
-            return null;
-        }
-        return featureType;
-    }
-
-    /**
-     * 推送模型入参
-     *
-     * @param searchData
-     * @throws Exception
-     */
-    private void paramFeatureInit(SearchData searchData, List<Map<String, Object>> featuresList) throws Exception {
-        if (featuresList != null && featuresList.size() > 0) {
-            Map<String, Object> featureMap = null;
-            for (int i = 0; i < featuresList.size(); i++) {
-                featureMap = featuresList.get(i);
-                Map<String, String> map = new HashMap<>();
-                for (Map.Entry<String, Object> entry : featureMap.entrySet()) {
-                    map.put(entry.getKey(), String.valueOf(entry.getValue()));
-                }
-                map.put("featureType", String.valueOf(featureMap.get("feature_type")));
-                map.put("featureName", String.valueOf(featureMap.get("feature_name")));
-                map.put("property", String.valueOf(featureMap.get("property")));
-                map.put("concept", String.valueOf(featureMap.get("concept")));
-                if (Constants.default_negative.equals(featureMap.get("negative"))) {
-                    if (map.get("featureType").equals(Constants.feature_type_time)) {
-                        searchData.getInputs().put("时间", map);
-                    } else {
-                        if (searchData.getInputs().get(map.get("feature_name")) == null) {
-                            if (i < 5) {
-                                searchData.getInputs().put(map.get("feature_name"), map);
-                            }
-                            searchData.getGraphInputs().put(map.get("feature_name"), map);
-                        }
-                    }
-                } else {
-                    searchData.getFilters().put(map.get("feature_name"), map);
-                }
-            }
-        }
-    }
-
-    /**
-     * 给SearchData中症状末尾添加诊断依据标准词
-     *
-     * @param lexemes
-     * @param standWords
-     * @param sData
-     * @return
-     */
-    public SearchData addStandWord(List<Lexeme> lexemes, Map<String, List<Map<String, String>>> standWords, SearchData sData, String itemType) {
-        List<Lexeme> feature = new ArrayList<>();
-
-        //收集分词结果中体征指标或体征指标值(数字)
-        for (Lexeme lexeme : lexemes) {
-            if (lexeme.getProperty().contains(",")) {
-                ApplicationCacheUtil.setProterty(lexeme); //如果分词后词性有多个,只选一个(暂时只处理症状,体征)
-            }
-            NegativeEnum lexemeNegativeEnum = NegativeEnum.parseOfValue(lexeme.getProperty());
-            for (int i = 0; i < negativeEnums.length; i++) {
-                if (lexemeNegativeEnum == negativeEnums[i]) {
-                    feature.add(lexeme);
-                    break;
-                }
-            }
-        }
-        //根据收集到的分词结果把体征指标和对应体征指标值(数字)拼接
-        List<String> featureType = new ArrayList<>();
-
-        for (int i = 0; i < feature.size(); i++) {
-            boolean featureTypeState = true;
-            boolean featureTypeStatus = false;
-            if (i < feature.size() - 2) {
-                for (int j = 0; j < negativeEnumTriple.length; j++) {
-                    String featureText = "";
-                    for (int k = 0; k < negativeEnumTriple[j].length; k++) {
-                        if (NegativeEnum.parseOfValue(feature.get(i + k).getProperty()) == negativeEnumTriple[j][k]) {
-                            featureTypeStatus = true;
-                            featureText += "\t" + feature.get(i + k).getText();
-                        } else {
-                            featureTypeStatus = false;
-                            break;
-                        }
-                    }
-                    if (featureTypeStatus) {
-                        featureType.add(featureText);
-                        featureTypeState = false;
-                    }
-                }
-            }
-            if (featureTypeState && i < feature.size() - 1) {
-                for (int j = 0; j < negativeEnumTwoTuple.length; j++) {
-                    String featureText = "";
-                    for (int k = 0; k < negativeEnumTwoTuple[j].length; k++) {
-                        if (NegativeEnum.parseOfValue(feature.get(i + k).getProperty()) == negativeEnumTwoTuple[j][k]) {
-                            featureTypeStatus = true;
-                            featureText += "\t" + feature.get(i + k).getText();
-                        } else {
-                            featureTypeStatus = false;
-                            break;
-                        }
-                    }
-                    if (featureTypeStatus) {
-                        featureType.add(featureText);
-                    }
-                }
-            }
-        }
-        //将标准词中体征指标值(数字)与分词结果中体征指标值(数字)比较
-        String newStandWord = "";
-        for (String f : featureType) {
-            String[] features = f.trim().split("\t");
-            if (standWords.containsKey(features[0])) {
-                List<Map<String, String>> standWordList = standWords.get(features[0]);
-                for (Map<String, String> standWordMap : standWordList) {
-                    if (standWordMap.containsKey("unit") && standWordMap.containsKey("value")) {
-                        if (features.length == 2) {
-                            newStandWord = judgment(features, standWordMap, newStandWord, sData, itemType);
-                        } else {
-                            if (standWordMap.get("unit").equals(features[2].toLowerCase())) {
-                                newStandWord = judgment(features, standWordMap, newStandWord, sData, itemType);
-                            }
-                        }
-                    } else if (standWordMap.containsKey("value")) {
-                        if (features.length == 2) {
-                            newStandWord = judgment(features, standWordMap, newStandWord, sData, itemType);
-                        }
-                    }
-                }
-            }
-        }
-        //血压既满足血压≥140/90mmHg,又满足血压小于90/60mmHg时,只取前者
-        String addStandWords = "";
-        String smallerStandWord = "";
-        boolean flag = true;
-        for (String standWord : newStandWord.split(",")) {
-            if (!"".equals(standWord) || standWord.length() > 0) {
-                if (standWord.contains("<") || standWord.contains("<=") || standWord.contains("小于")) {
-                    smallerStandWord += "," + standWord;
-                } else {
-                    addStandWords += "," + proxy(standWord);
-                    flag = false;
-                }
-            }
-        }
-        if (flag) {
-            addStandWords += smallerStandWord;
-        }
-        addbloodPressure(sData, itemType, addStandWords);
-        return sData;
-    }
-
-
-    /**
-     * 将标准词中体征指标值(数字)与分词结果中体征指标值(数字)比较
-     * 除了血压>140/90mmHg类似标准词,其他标准词直接添加在症状后面
-     *
-     * @param features
-     * @param standWordMap
-     * @param standWord
-     * @param sData
-     * @return 血压>140/90mmHg或血压小于90/60mmHg或同时返回,在addStandWord()中进一步处理
-     */
-    private String judgment(String[] features, Map<String, String> standWordMap, String standWord, SearchData sData, String itemType) {
-        if (hasDigit(features[1])) {
-            try {
-                if (">".equals(standWordMap.get("op"))) {
-                    //单独处理  血压>140/90mmHg   类似情况
-                    if (features[1].contains("/")) {
-                        if (standWordMap.get("value").contains("/")) {
-                            String[] feature = features[1].split("/");
-                            Integer featuresSBP = Integer.valueOf(feature[0]); //分词特征收缩压
-                            Integer featuresDBP = Integer.valueOf(feature[1]); //分词特征舒张压
-
-                            String[] values = standWordMap.get("value").split("/");
-                            Integer standWordSBP = Integer.valueOf(values[0]); //标准词收缩压
-                            Integer standWordDBP = Integer.valueOf(values[1]); //标准词舒张压
-                            if (featuresSBP > standWordSBP || featuresDBP > standWordDBP) {
-                                standWord += "," + standWordMap.get("standword");
-                            }
-                        }
-                    } else {
-                        //"symptom","other","vital","lis","pacs","diag"
-                        String num = getNum(standWordMap.get("value"));
-                        if (Double.valueOf(getNum(features[1])) > Double.valueOf(num)) {
-                            setStandword(standWordMap, sData, itemType);
-                        }
-                    }
-                } else if ("<".equals(standWordMap.get("op"))) {
-                    //单独处理  血压小于90/60mmHg   类似情况
-                    if (standWordMap.get("value").contains("/")) {
-                        if (features[1].contains("/")) {
-                            String[] feature = features[1].split("/");
-                            Integer featuresSBP = Integer.valueOf(feature[0]); //分词特征收缩压
-                            Integer featuresDBP = Integer.valueOf(feature[1]); //分词特征舒张压
-
-                            String[] values = standWordMap.get("value").split("/");
-                            Integer standWordSBP = Integer.valueOf(values[0]); //标准词收缩压
-                            Integer standWordDBP = Integer.valueOf(values[1]); //标准词舒张压
-                            if (featuresSBP < standWordSBP || featuresDBP < standWordDBP) {
-                                standWord += "," + standWordMap.get("standword");
-                            }
-                        }
-                    } else {
-                        String num = getNum(standWordMap.get("value"));
-                        if (Double.valueOf(getNum(features[1])) < Double.valueOf(num)) {
-                            setStandword(standWordMap, sData, itemType);
-                        }
-                    }
-                } else if (">=".equals(standWordMap.get("op"))) {
-                    //单独处理  血压大于等于140/90mmHg   类似情况
-                    if (standWordMap.get("value").contains("/")) {
-                        if (features[1].contains("/")) {
-                            String[] feature = features[1].split("/");
-                            Integer featuresSBP = Integer.valueOf(feature[0]); //分词特征收缩压
-                            Integer featuresDBP = Integer.valueOf(feature[1]); //分词特征舒张压
-
-                            String[] values = standWordMap.get("value").split("/");
-                            Integer standWordSBP = Integer.valueOf(values[0]); //标准词收缩压
-                            Integer standWordDBP = Integer.valueOf(values[1]); //标准词舒张压
-                            if (featuresSBP >= standWordSBP || featuresDBP >= standWordDBP) {
-                                standWord += "," + standWordMap.get("standword");
-                            }
-                        }
-                    } else {
-                        String num = getNum(standWordMap.get("value"));
-                        if (Double.valueOf(getNum(features[1])) >= Double.valueOf(num)) {
-                            setStandword(standWordMap, sData, itemType);
-                        }
-                    }
-                } else if ("<=".equals(standWordMap.get("op"))) {
-                    //单独处理  血压小于等于90/60mmHg   类似情况
-                    if (standWordMap.get("value").contains("/")) {
-                        if (features[1].contains("/")) {
-                            String[] feature = features[1].split("/");
-                            Integer featuresSBP = Integer.valueOf(feature[0]); //分词特征收缩压
-                            Integer featuresDBP = Integer.valueOf(feature[1]); //分词特征舒张压
-
-                            String[] values = standWordMap.get("value").split("/");
-                            Integer standWordSBP = Integer.valueOf(values[0]); //标准词收缩压
-                            Integer standWordDBP = Integer.valueOf(values[1]); //标准词舒张压
-                            if (featuresSBP <= standWordSBP || featuresDBP <= standWordDBP) {
-                                standWord += "," + standWordMap.get("standword");
-                            }
-                        }
-                    } else {
-                        String num = getNum(standWordMap.get("value"));
-                        if (Double.valueOf(getNum(features[1])) <= Double.valueOf(num)) {
-                            setStandword(standWordMap, sData, itemType);
-                        }
-                    }
-                }
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-        return standWord;
-    }
-
-    /**
-     * 根据不同项目添加标准词
-     *
-     * @param standWordMap
-     * @param sData
-     * @param itemType
-     */
-    private void setStandword(Map<String, String> standWordMap, SearchData sData, String itemType) {
-        switch (itemType) {
-            case "symptom":
-                if (sData.getSymptom().indexOf(proxy(standWordMap.get("standword"))) == -1) {
-                    sData.setSymptom(sData.getSymptom() + "," + proxy(standWordMap.get("standword")));
-                }
-                break;
-            case "other":
-                if (sData.getOther().indexOf(proxy(standWordMap.get("standword"))) == -1) {
-                    sData.setOther(sData.getOther() + "," + proxy(standWordMap.get("standword")));
-                }
-                break;
-            case "vital":
-                if (sData.getVital().indexOf(proxy(standWordMap.get("standword"))) == -1) {
-                    sData.setVital(sData.getVital() + "," + proxy(standWordMap.get("standword")));
-                }
-                break;
-            case "lis":
-                if (sData.getLis().indexOf(proxy(standWordMap.get("standword"))) == -1) {
-                    sData.setLis(sData.getLis() + "," + proxy(standWordMap.get("standword")));
-                }
-                break;
-            case "pacs":
-                if (sData.getPacs().indexOf(proxy(standWordMap.get("standword"))) == -1) {
-                    sData.setPacs(sData.getPacs() + "," + proxy(standWordMap.get("standword")));
-                }
-                break;
-            case "diag":
-                if (sData.getDiag().indexOf(proxy(standWordMap.get("standword"))) == -1) {
-                    sData.setDiag(sData.getDiag() + "," + proxy(standWordMap.get("standword")));
-                }
-                break;
-        }
-    }
-
-    /**
-     * 添加血压(血压既满足血压≥140/90mmHg,又满足血压小于90/60mmHg时,只取前者)
-     *
-     * @param sData
-     * @param itemType
-     * @param addStandWords
-     */
-    private void addbloodPressure(SearchData sData, String itemType, String addStandWords) {
-        switch (itemType) {
-            case "symptom":
-                if (sData.getSymptom().indexOf(addStandWords) == -1) {
-                    sData.setSymptom(sData.getSymptom() + "," + addStandWords);
-                }
-                break;
-            case "other":
-                if (sData.getOther().indexOf(addStandWords) == -1) {
-                    sData.setOther(sData.getOther() + "," + addStandWords);
-                }
-                break;
-            case "vital":
-                if (sData.getVital().indexOf(addStandWords) == -1) {
-                    sData.setVital(sData.getVital() + "," + addStandWords);
-                }
-                break;
-            case "lis":
-                if (sData.getLis().indexOf(addStandWords) == -1) {
-                    sData.setLis(sData.getLis() + "," + addStandWords);
-                }
-                break;
-            case "pacs":
-                if (sData.getPacs().indexOf(addStandWords) == -1) {
-                    sData.setPacs(sData.getPacs() + "," + addStandWords);
-                }
-                break;
-            case "diag":
-                if (sData.getDiag().indexOf(addStandWords) == -1) {
-                    sData.setDiag(sData.getDiag() + "," + addStandWords);
-                }
-                break;
-        }
-    }
-
-    /**
-     * 判断分词后的特征中是否含有数字
-     *
-     * @param content
-     * @return
-     */
-    private boolean hasDigit(String content) {
-        boolean flag = false;
-        if (Pattern.compile(".*\\d+.*").matcher(content).matches()) {
-            flag = true;
-        }
-        return flag;
-    }
-
-    /**
-     * 将字符串中的数字提取出来,针对分词结果中"90."类似情况
-     *
-     * @param standWord
-     * @return
-     */
-    private String getNum(String standWord) {
-        StringBuffer sb = new StringBuffer();
-        for (String num : standWord.replaceAll("[^0-9]", ",").split(",")) {
-            if (num.length() > 0) {
-                sb.append(num);
-            }
-        }
-        return sb.toString();
-    }
-
-    /**
-     * 将血压超过标准值的标准词改为血压升高
-     *
-     * @param standWord
-     * @return
-     */
-    private String proxy(String standWord) {
-        if (standWord.contains("压") && (standWord.contains("≥") || standWord.contains("大于"))) {
-            standWord = "血压升高";
-        } else if (standWord.contains("心率") && (standWord.contains("大于") || standWord.contains("超过"))) {
-            standWord = "心率快";
-        }
-        return standWord;
-    }
-
-    private SearchData cleanFeature(List<Map<String, Object>> featuresList, FeatureAnalyze fa,
-                                    SearchData searchData) {
-        // 在输入的辅检文本中,只提取辅检信息
-        String[] PACS_Feature = { Constants.word_property_PACS,
-                Constants.word_property_PACS_Detail, Constants.word_property_PACS_Result };
-        searchData = removeFeature(searchData.getLis(), fa, searchData, PACS_Feature, FeatureType.PACS);
-
-        // 在输入的化验文本中,只提取化验信息
-        String[] LIS_Feature = { Constants.word_property_LIS,
-                Constants.word_property_LIS_Detail, Constants.word_property_LIS_Result };
-        searchData = removeFeature(searchData.getPacs(), fa, searchData, LIS_Feature, FeatureType.LIS);
-
-        return searchData;
-    }
-
-    private SearchData removeFeature(String text, FeatureAnalyze fa,
-                                     SearchData searchData, String[] properties, FeatureType featureType) {
-        String name = "";
-        Boolean related = false;
-
-        try {
-            List<Map<String, Object>> featureList = fa.start(text, featureType);
-            if (featureList != null) {
-                for (Map<String, Object> item : featureList) {
-                    name = item.get("feature_name").toString();
-                    String[] property = item.get("property").toString().split(",");
-                    for (String prop : property) {
-                        if (Arrays.asList(properties).contains(prop)) {
-                            //                            related = true;
-                            searchData.getInputs().remove(name);
-                            break;
-                        }
-                    }
-
-                    //                    if (!related) {
-                    //                        searchData.getInputs().remove(name);
-                    //                    }
-                    //9
-                    //                    related = false;
-                }
-            }
-
-        } catch (Exception ex) {
-            ex.printStackTrace();
-        } finally {
-            return searchData;
-        }
-    }
-}

+ 3 - 97
bigdata-web/src/main/java/org/diagbot/bigdata/work/ResultDataProxy.java

@@ -1,10 +1,9 @@
 package org.diagbot.bigdata.work;
 
 import org.apache.commons.lang3.StringUtils;
-import org.diagbot.bigdata.common.ApplicationCacheUtil;
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-import org.diagbot.bigdata.util.BigDataConstants;
-import org.diagbot.common.work.FeatureRate;
+import org.diagbot.common.push.bean.FeatureRate;
+import org.diagbot.common.push.bean.ResultMappingFilter;
+import org.diagbot.common.push.cache.ApplicationCacheUtil;
 import org.diagbot.nlp.feature.FeatureType;
 import org.diagbot.nlp.util.Constants;
 import org.diagbot.nlp.util.NegativeEnum;
@@ -29,7 +28,6 @@ public class ResultDataProxy {
         DecimalFormat df = new DecimalFormat("0.####");
         List<FeatureRate> featureList = new ArrayList<>(10);
 
-//        Map<String, String> resultMappingVitalMap = ApplicationCacheUtil.getDoc_result_mapping_vital_map();
         Map<String, String> resultMappingDiagMap = ApplicationCacheUtil.getDoc_result_mapping_diag_map();
         Map<String, Map<String, ResultMappingFilter>> resultMappingFilterMap = ApplicationCacheUtil.getDoc_result_mapping_filter_map();
         Map<String, Map<String, String>> synonymMap = ApplicationCacheUtil.getStandard_info_synonym_map();
@@ -67,19 +65,11 @@ public class ResultDataProxy {
                 }
             }
             featureList.add(featureRate);
-//            if (!featureType.equals(Constants.feature_type_diag)) {
                 if (cursor < searchData.getLength()) {
                     cursor++;
                 } else {
                     break;
                 }
-//            } else {            //诊断最多返回5个
-//                if (cursor < 5) {
-//                    cursor++;
-//                } else {
-//                    break;
-//                }
-//            }
         }
 
         return featureList;
@@ -149,88 +139,4 @@ public class ResultDataProxy {
         }
         return result;
     }
-
-    /**
-     * 大小类数据合并
-     *
-     * @param request
-     * @param map
-     */
-    public Map<String, Float> resultMerge(HttpServletRequest request, Map<String, Float> map) {
-        Map<String, NlpCache.Node> nodesMap = NlpCache.getStandard_info_type_tree_map();
-        Map<String, Float> resultMap = new HashMap<>();
-        //设定阀值
-        float threshold = 0.001f;
-        Map<String, Float> thresholdMap = new HashMap<>();
-        for (Map.Entry<String, Float> entry : map.entrySet()) {
-            if (!"null".equals(entry.getKey()) && entry.getValue() >= threshold) {
-                thresholdMap.put(entry.getKey(), entry.getValue());
-            }
-        }
-
-        NlpCache.Node node = null;
-        List<String> delList = new ArrayList<>();
-        for (Map.Entry<String, Float> entry : thresholdMap.entrySet()) {
-            if (delList.contains(entry.getKey())) continue;
-
-            node = nodesMap.get(entry.getKey());
-            if (node != null) {
-                String topName = node.getName();
-                NlpCache.Node p = node.getParent();
-                if (p != null && nodesMap.get(p.getName()) != null) {
-                    topName = p.getName();
-                }
-                while (p != null) {
-                    List<String> nodeNamesList = new ArrayList<>();
-                    lookChilds(topName, p, thresholdMap, nodeNamesList);
-                    if (nodeNamesList.size() > 0) {
-                        topName = p.getName();
-                    }
-                    p = p.getParent();
-                }
-
-                if (thresholdMap.get(topName) != null) {
-                    resultMap.put(topName, thresholdMap.get(topName));
-                    delList.add(topName);
-                }
-                NlpCache.Node topNode = nodesMap.get(topName);
-                lookChildsAndCal(resultMap, thresholdMap, topNode, delList, topNode.getName());
-                delList.add(topName);
-            } else {
-                resultMap.put(entry.getKey(), entry.getValue());
-            }
-        }
-        return resultMap;
-    }
-
-    private void lookChilds(String own, NlpCache.Node p, Map<String, Float> thresholdMap, List<String> nodeNamesList) {
-        for (NlpCache.Node n : p.getChilds()) {
-            if (own.equals(n.getName())) {
-                continue;
-            } else {
-                if (thresholdMap.get(n.getName()) != null) {
-                    nodeNamesList.add(n.getName());
-                }
-                if (n.getChilds().size() > 0) {
-                    lookChilds("", n, thresholdMap, nodeNamesList);
-                }
-            }
-        }
-    }
-
-    private void lookChildsAndCal(Map<String, Float> resultMap, Map<String, Float> thresholdMap, NlpCache.Node node, List<String> delList, String topName) {
-        for (NlpCache.Node n : node.getChilds()) {
-            if (thresholdMap.get(n.getName()) != null) {
-                if (resultMap.get(topName) == null) {
-                    resultMap.put(topName, thresholdMap.get(n.getName()));
-                } else {
-                    resultMap.put(topName, resultMap.get(topName) + thresholdMap.get(n.getName()));
-                }
-                delList.add(n.getName());
-            }
-            if (n.getChilds().size() > 0) {
-                lookChildsAndCal(resultMap, thresholdMap, n, delList, topName);
-            }
-        }
-    }
 }

+ 37 - 41
bigdata-web/src/test/java/org/diagbot/AddStandWordTest.java

@@ -1,8 +1,8 @@
 package org.diagbot;
 
-import org.diagbot.bigdata.common.ApplicationCacheUtil;
-import org.diagbot.bigdata.work.ParamsDataProxy;
-import org.diagbot.common.work.SearchData;
+import org.diagbot.bigdata.work.BigDataParamsProxy;
+import org.diagbot.common.push.bean.SearchData;
+import org.diagbot.common.push.cache.ApplicationCacheUtil;
 import org.diagbot.nlp.participle.ParticipleUtil;
 import org.diagbot.nlp.participle.cfg.Configuration;
 import org.diagbot.nlp.participle.cfg.DefaultConfig;
@@ -11,10 +11,6 @@ import org.diagbot.nlp.participle.word.LexemePath;
 
 import java.io.IOException;
 import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 
 /**
  * @Description:
@@ -47,39 +43,39 @@ public class AddStandWordTest {
         long seconds = endTime - startTime;
         System.out.println("添加标准词使用了:"+splitSeconds + "毫秒.");
         System.out.println("处理文本总共使用了:"+seconds + "毫秒.");*/
-        for (int j = 1; j < 11; j++) {
-
-
-            SearchData searchData = new SearchData();
-            ParamsDataProxy paramsDataProxy = new ParamsDataProxy();
-            searchData.setSymptom("安静时心率98次/分");
-            searchData.setOther("心率156次/分");
-            searchData.setVital("男性43岁");
-            searchData.setLis("Hachinski缺血积分2分");
-            searchData.setPacs("病程9个月");
-            searchData.setDiag("BMI12");
-            String[] items = { searchData.getSymptom(), searchData.getOther(), searchData.getVital()
-                    , searchData.getLis(), searchData.getPacs(), searchData.getDiag() };
-            String[] itemsType = { "symptom", "other", "vital", "lis", "pacs", "diag" };
-            long startTime = System.currentTimeMillis();
-            for (int i = 0; i < items.length; i++) {
-                if (items[i] != null) {
-                    LexemePath<Lexeme> featureData = ParticipleUtil.participle(items[i]);
-                    if (featureData != null) {
-                        paramsDataProxy.addStandWord(featureData, ApplicationCacheUtil.getKl_result_mapping_standword_map(), searchData, itemsType[i]);
-                    }
-                }
-            }
-            System.out.println(searchData.getSymptom());
-            System.out.println(searchData.getOther());
-            System.out.println(searchData.getVital());
-            System.out.println(searchData.getLis());
-            System.out.println(searchData.getPacs());
-            System.out.println(searchData.getDiag());
-            long endTime = System.currentTimeMillis();
-            System.out.println("处理文本总共使用了:" + (endTime - startTime) + "毫秒.");
-            System.out.println("---------------------------第" + j + "次处理---------------------------------");
-        }
+//        for (int j = 1; j < 11; j++) {
+//
+//
+//            SearchData searchData = new SearchData();
+//            BigDataParamsProxy paramsDataProxy = new BigDataParamsProxy();
+//            searchData.setSymptom("安静时心率98次/分");
+//            searchData.setOther("心率156次/分");
+//            searchData.setVital("男性43岁");
+//            searchData.setLis("Hachinski缺血积分2分");
+//            searchData.setPacs("病程9个月");
+//            searchData.setDiag("BMI12");
+//            String[] items = { searchData.getSymptom(), searchData.getOther(), searchData.getVital()
+//                    , searchData.getLis(), searchData.getPacs(), searchData.getDiag() };
+//            String[] itemsType = { "symptom", "other", "vital", "lis", "pacs", "diag" };
+//            long startTime = System.currentTimeMillis();
+//            for (int i = 0; i < items.length; i++) {
+//                if (items[i] != null) {
+//                    LexemePath<Lexeme> featureData = ParticipleUtil.participle(items[i]);
+//                    if (featureData != null) {
+//                        paramsDataProxy.addStandWord(featureData, ApplicationCacheUtil.getKl_result_mapping_standword_map(), searchData, itemsType[i]);
+//                    }
+//                }
+//            }
+//            System.out.println(searchData.getSymptom());
+//            System.out.println(searchData.getOther());
+//            System.out.println(searchData.getVital());
+//            System.out.println(searchData.getLis());
+//            System.out.println(searchData.getPacs());
+//            System.out.println(searchData.getDiag());
+//            long endTime = System.currentTimeMillis();
+//            System.out.println("处理文本总共使用了:" + (endTime - startTime) + "毫秒.");
+//            System.out.println("---------------------------第" + j + "次处理---------------------------------");
+//        }
         //        String s = "\tsafGG\tAFASSADG";
         //        System.out.println(s);
         //        System.out.println(s.trim());
@@ -122,7 +118,7 @@ public class AddStandWordTest {
             int i = 1;
             for (Lexeme lexeme : lexemes) {
                 if (lexeme.getProperty().contains(",")) {
-                    ApplicationCacheUtil.setProterty(lexeme);
+//                    ApplicationCacheUtil.setProterty(lexeme);
                 }
                 if (lexemes.size() != i) {
                     System.out.print(lexeme.getText() + "(" + lexeme.getProperty() + ")|");

+ 135 - 0
bigdata-web/src/test/java/org/diagbot/EyeHospitalData.java

@@ -0,0 +1,135 @@
+package org.diagbot;
+
+import org.diagbot.pub.jdbc.MysqlJdbc;
+
+import java.io.BufferedReader;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.sql.*;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.*;
+import java.util.Date;
+import java.util.stream.Collectors;
+
+/**
+ * Created by louhr on 2019/9/3.
+ */
+public class EyeHospitalData {
+
+    public static void main(String[] args) {
+//        EyeHospitalData ehl = new EyeHospitalData();
+//        Connection conn = ehl.createOracleJdbc();
+//        ehl.queryHospitalInput(conn);
+    }
+
+//    private List<Map<String, Object>> queryHospitalInput(Connection conn) {
+//        PreparedStatement pstmt = null;
+//        ResultSet rs = null;
+//        List<Map<String, Object>> list = new ArrayList<>();
+//        String ipid = "";
+//        try {
+//
+//            DateFormat df = new SimpleDateFormat("yyyyMMdd");
+//
+//            Calendar cal = Calendar.getInstance();
+//            cal.setTime(new Date());
+//            String end_time = df.format(cal.getTime());
+//
+//            cal.add(Calendar.DATE, -7);
+//            String start_time = df.format(cal.getTime());
+//
+//
+//            int record_cnt = 1;
+//
+//            while (start_time.compareTo("20190801") > -1) {
+//                System.out.println(start_time + "..." + end_time);
+//
+//                String sql = "select xml_cont, ipid, pid, dept_name, dept_code, create_time from inpcase.hospital_record " +
+//                        "where substr(create_time, 0, 8) > '" + start_time + "' and substr(create_time, 0, 8) <= '" + end_time + "'";
+//                pstmt = conn.prepareStatement(sql);
+//                //建立一个结果集,用来保存查询出来的结果
+//                rs = pstmt.executeQuery();
+//
+//
+//                while (rs.next()) {
+//                    if (record_cnt % 100 == 0) {
+//                        System.out.println("已查询" + record_cnt + "行数据!");
+//                    }
+//                    Map<String, Object> map = new HashMap<>();
+//                    OracleResultSet ors = (OracleResultSet) rs;
+//                    OPAQUE op = ors.getOPAQUE(1);
+//                    ipid = ors.getString(2);
+//                    String pid = ors.getString(3);
+//                    String dept_name = ors.getString(4);
+//                    String dept_code = ors.getString(5);
+//                    String create_time = ors.getString(6);
+//
+//                    XMLType xml = XMLType.createXML(op);
+//                    String xml_cont = xml.getStringVal();
+//                    xml_cont = xml_cont.substring(xml_cont.indexOf("<text>") + 6, xml_cont.indexOf("</text>"));
+//
+//                    String sex = xml_cont.substring(xml_cont.indexOf("性  别:") + 5, xml_cont.indexOf("性  别:") + 8);
+//                    String age = xml_cont.substring(xml_cont.indexOf("年  龄:") + 5, xml_cont.indexOf("年  龄:") + 8);
+//                    String marry = xml_cont.substring(xml_cont.indexOf("婚  姻:") + 5, xml_cont.indexOf("婚  姻:") + 8);
+//                    String in_hospital = xml_cont.substring(xml_cont.indexOf("入院日期:") + 5, xml_cont.indexOf("入院日期:") + 22);
+//                    String content = xml_cont.substring(xml_cont.indexOf("主  诉:"), xml_cont.indexOf("医师签名:"));
+//
+//                    map.put("ipid", ipid);
+//                    map.put("pid", pid);
+//                    map.put("dept_name", dept_name);
+//                    map.put("dept_code", dept_code);
+//                    map.put("create_time", create_time);
+//                    map.put("sex", sex);
+//                    map.put("age", age);
+//                    map.put("marry", marry);
+//                    map.put("in_hospital", in_hospital);
+//                    map.put("content", content);
+//
+//                    System.out.println(sex);
+//                    System.out.println(age);
+//                    System.out.println(marry);
+//                    System.out.println(in_hospital);
+//                    System.out.println(content);
+//
+//                    list.add(map);
+//
+//                    record_cnt++;
+//                }
+//
+//                end_time = start_time;
+//                cal.add(Calendar.DATE, -7);
+//                start_time = df.format(cal.getTime());
+//            }
+//        } catch (Exception e) {
+//            System.out.println(ipid);
+//            e.printStackTrace();
+//        } finally {
+//            try {
+//                rs.close();
+//                pstmt.close();
+//            }catch (SQLException sqle) {
+//                sqle.printStackTrace();
+//            }
+//        }
+//        return list;
+//    }
+//
+//    private void insertMysql(List<Map<String, Object>> list) {
+//        MysqlJdbc nlpJdbc = new MysqlJdbc("root", "", "jdbc:mysql://127.0.0.1:3306/eye_hospital?useUnicode=true&characterEncoding=UTF-8");
+//        nlpJdbc.insert(list, "hospital_record", new String[]{"ipid", "pid", "dept_name", "dept_code", "create_time", "sex", "age", "marry", "in_hospital", "content"});
+//    }
+//
+//    private Connection createOracleJdbc() {
+//        Connection conn = null;
+//        try {
+//            Class.forName("oracle.jdbc.driver.OracleDriver");
+//            conn = DriverManager.getConnection("jdbc:oracle:thin:@//172.17.1.143:1521/orc1",
+//                    "louhr", "louhr");
+//            return conn;
+//        } catch (Exception e) {
+//            e.printStackTrace();
+//        }
+//        return conn;
+//    }
+}

+ 8 - 2
common-push/pom.xml

@@ -22,11 +22,17 @@
             <artifactId>public</artifactId>
             <version>${project.version}</version>
         </dependency>
+
+        <dependency>
+            <groupId>org.diagbot</groupId>
+            <artifactId>nlp</artifactId>
+            <version>1.0.0</version>
+        </dependency>
+
         <dependency>
             <groupId>org.diagbot</groupId>
-            <artifactId>graph</artifactId>
+            <artifactId>common-service</artifactId>
             <version>1.0.0</version>
-            <scope>compile</scope>
         </dependency>
     </dependencies>
 

+ 0 - 11
common-push/src/main/java/org/diagbot/common/push/Test.java

@@ -1,11 +0,0 @@
-package org.diagbot.common.push;
-
-/**
- * @ClassName org.diagbot.common.push.Test
- * @Description TODO
- * @Author fyeman
- * @Date 2019/8/5/005 17:07
- * @Version 1.0
- **/
-public class Test {
-}

+ 34 - 0
common-push/src/main/java/org/diagbot/common/push/bean/CrisisDetail.java

@@ -0,0 +1,34 @@
+package org.diagbot.common.push.bean;
+
+/**
+ * Created by louhr on 2019/8/31.
+ */
+public class CrisisDetail {
+    private String remindText;
+    private String standardText;
+    private String originText;
+
+    public String getRemindText() {
+        return remindText;
+    }
+
+    public void setRemindText(String remindText) {
+        this.remindText = remindText;
+    }
+
+    public String getStandardText() {
+        return standardText;
+    }
+
+    public void setStandardText(String standardText) {
+        this.standardText = standardText;
+    }
+
+    public String getOriginText() {
+        return originText;
+    }
+
+    public void setOriginText(String originText) {
+        this.originText = originText;
+    }
+}

+ 1 - 1
common-service/src/main/java/org/diagbot/common/work/FeatureRate.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.work;
+package org.diagbot.common.push.bean;
 
 /**
  * Created by fyeman on 2018/1/17.

+ 1 - 1
common-service/src/main/java/org/diagbot/common/work/LisDetail.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.work;
+package org.diagbot.common.push.bean;
 
 public class LisDetail {
     private String detailName;

+ 79 - 0
common-push/src/main/java/org/diagbot/common/push/bean/PreResult.java

@@ -0,0 +1,79 @@
+package org.diagbot.common.push.bean;
+
+/**
+ * Created by louhr on 2019/8/31.
+ */
+public class PreResult {
+    private String detailName = "";
+    private String uniqueName = "";
+    private String maxValue = "";
+    private String minValue = "";
+    private String name = "";
+    private String otherValue = "";
+    private String units = "";
+    private String value = "";
+
+    public String getDetailName() {
+        return detailName;
+    }
+
+    public void setDetailName(String detailName) {
+        this.detailName = detailName;
+    }
+
+    public String getUniqueName() {
+        return uniqueName;
+    }
+
+    public void setUniqueName(String uniqueName) {
+        this.uniqueName = uniqueName;
+    }
+
+    public String getMaxValue() {
+        return maxValue;
+    }
+
+    public void setMaxValue(String maxValue) {
+        this.maxValue = maxValue;
+    }
+
+    public String getMinValue() {
+        return minValue;
+    }
+
+    public void setMinValue(String minValue) {
+        this.minValue = minValue;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public String getOtherValue() {
+        return otherValue;
+    }
+
+    public void setOtherValue(String otherValue) {
+        this.otherValue = otherValue;
+    }
+
+    public String getUnits() {
+        return units;
+    }
+
+    public void setUnits(String units) {
+        this.units = units;
+    }
+
+    public String getValue() {
+        return value;
+    }
+
+    public void setValue(String value) {
+        this.value = value;
+    }
+}

+ 14 - 4
common-service/src/main/java/org/diagbot/common/work/ResponseData.java

@@ -1,7 +1,7 @@
-package org.diagbot.common.work;
-import com.alibaba.fastjson.JSONObject;
-import org.diagbot.common.javabean.Filnlly;
-import org.diagbot.common.javabean.MedicalIndication;
+package org.diagbot.common.push.bean;
+
+import org.diagbot.common.push.bean.neo4j.Filnlly;
+import org.diagbot.common.push.bean.neo4j.MedicalIndication;
 
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -31,6 +31,8 @@ public class ResponseData {
     private List<String> diffDiag;//鉴别诊断
     private List<String> excludeDiag; //排除诊断
 
+    private Map<String, List<CrisisDetail>> crisisDetails = new HashMap<>();
+
     public List<String> getDiffDiag() {
         return diffDiag;
     }
@@ -160,4 +162,12 @@ public class ResponseData {
     public void setBeforeCombineDis(List<FeatureRate> beforeCombineDis) {
         this.beforeCombineDis = beforeCombineDis;
     }
+
+    public Map<String, List<CrisisDetail>> getCrisisDetails() {
+        return crisisDetails;
+    }
+
+    public void setCrisisDetails(Map<String, List<CrisisDetail>> crisisDetails) {
+        this.crisisDetails = crisisDetails;
+    }
 }

+ 1 - 1
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingFilter.java

@@ -1,4 +1,4 @@
-package org.diagbot.bigdata.dao.model;
+package org.diagbot.common.push.bean;
 
 
 public class ResultMappingFilter {

+ 143 - 0
common-push/src/main/java/org/diagbot/common/push/bean/Rule.java

@@ -0,0 +1,143 @@
+package org.diagbot.common.push.bean;
+
+import lombok.Getter;
+import lombok.Setter;
+
+/**
+ * 触发规则
+ * @author Mark Huang
+ * @since 27/08/2019
+ */
+@Getter
+@Setter
+public class Rule {
+    private String id = "";
+    // 大类名称
+    private String pub_name = "";
+    // 最小值比较符
+    private String min_operator = "";
+    // 最小值
+    private String min_value = "";
+    // 最小值单位
+    private String min_unit = "";
+    // 最大值比较符
+    private String max_operator = "";
+    // 最大值
+    private String max_value = "";
+    // 最大值单位
+    private String max_unit = "";
+    // 标准值 用作等于
+    private String eq_operator = "";
+    // 标准值
+    private String eq_value = "";
+    // 标准值单位
+    private String eq_unit = "";
+    //提醒信息
+    private String remind = "";
+    //提醒信息
+    private String originText = "";
+
+    public String getId() {
+        return id;
+    }
+
+    public void setId(String id) {
+        this.id = id;
+    }
+
+    public String getPub_name() {
+        return pub_name;
+    }
+
+    public void setPub_name(String pub_name) {
+        this.pub_name = pub_name;
+    }
+
+    public String getMin_operator() {
+        return min_operator;
+    }
+
+    public void setMin_operator(String min_operator) {
+        this.min_operator = min_operator;
+    }
+
+    public String getMin_value() {
+        return min_value;
+    }
+
+    public void setMin_value(String min_value) {
+        this.min_value = min_value;
+    }
+
+    public String getMin_unit() {
+        return min_unit;
+    }
+
+    public void setMin_unit(String min_unit) {
+        this.min_unit = min_unit;
+    }
+
+    public String getMax_operator() {
+        return max_operator;
+    }
+
+    public void setMax_operator(String max_operator) {
+        this.max_operator = max_operator;
+    }
+
+    public String getMax_value() {
+        return max_value;
+    }
+
+    public void setMax_value(String max_value) {
+        this.max_value = max_value;
+    }
+
+    public String getMax_unit() {
+        return max_unit;
+    }
+
+    public void setMax_unit(String max_unit) {
+        this.max_unit = max_unit;
+    }
+
+    public String getEq_operator() {
+        return eq_operator;
+    }
+
+    public void setEq_operator(String eq_operator) {
+        this.eq_operator = eq_operator;
+    }
+
+    public String getEq_value() {
+        return eq_value;
+    }
+
+    public void setEq_value(String eq_value) {
+        this.eq_value = eq_value;
+    }
+
+    public String getEq_unit() {
+        return eq_unit;
+    }
+
+    public void setEq_unit(String eq_unit) {
+        this.eq_unit = eq_unit;
+    }
+
+    public String getRemind() {
+        return remind;
+    }
+
+    public void setRemind(String remind) {
+        this.remind = remind;
+    }
+
+    public String getOriginText() {
+        return originText;
+    }
+
+    public void setOriginText(String originText) {
+        this.originText = originText;
+    }
+}

+ 45 - 0
common-push/src/main/java/org/diagbot/common/push/bean/RuleApp.java

@@ -0,0 +1,45 @@
+package org.diagbot.common.push.bean;
+
+/**
+ * @Description:
+ * @Author: HUJING
+ * @Date: 2019/9/3 13:21
+ */
+public class RuleApp {
+    private String id;
+    private String ruleIds;
+    private String typeId;
+    private String remind;
+
+    public String getId() {
+        return id;
+    }
+
+    public void setId(String id) {
+        this.id = id;
+    }
+
+    public String getRuleIds() {
+        return ruleIds;
+    }
+
+    public void setRuleIds(String ruleIds) {
+        this.ruleIds = ruleIds;
+    }
+
+    public String getTypeId() {
+        return typeId;
+    }
+
+    public void setTypeId(String typeId) {
+        this.typeId = typeId;
+    }
+
+    public String getRemind() {
+        return remind;
+    }
+
+    public void setRemind(String remind) {
+        this.remind = remind;
+    }
+}

+ 62 - 4
common-service/src/main/java/org/diagbot/common/work/SearchData.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.work;
+package org.diagbot.common.push.bean;
 
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -23,15 +23,23 @@ public class SearchData {
     protected String resourceType;
     //外部系统编码 用于返回映射数据,如果sysCode为空或null,则返回kl_standard_info标准名称
     protected String sysCode;
-    private List<LisDetail> lisArr = new ArrayList<>();
+    private List<PreResult> lisArr = new ArrayList<>();
+    protected String normal  = "";
     protected String chief  = "";
     protected String symptom = "";
     protected String vital = "";
     protected String lis = "";
     protected String pacs = "";
     protected String diag = "";
+    private String diseaseName;
     protected String past = "";
     protected String other = "";
+    //当前开单lis项目
+    protected String lisOrder = "";
+    //当前开单pacs项目
+    protected String pacsOrder = "";
+    //当前开单其他 预留
+    protected String otherOrder = "";
     //大数据推送诊断结果信息
     protected List<FeatureRate> pushDiags = new ArrayList<>();
 
@@ -48,6 +56,8 @@ public class SearchData {
     private Map<String, Map<String, String>> graphInputs = new HashMap<>(10, 0.8f);
     //阴性 页面录入数据需要对结果过滤的集合
     private Map<String, Map<String, String>> filters = new HashMap<>(10, 0.8f);
+    //满足规则的ID集合
+    private Map<String, List<Rule>> rules = new HashMap<>();
 
     public Integer getDisType() {
         return disType;
@@ -57,11 +67,11 @@ public class SearchData {
         this.disType = disType;
     }
 
-    public List<LisDetail> getLisArr() {
+    public List<PreResult> getLisArr() {
         return lisArr;
     }
 
-    public void setLisArr(List<LisDetail> lisArr) {
+    public void setLisArr(List<PreResult> lisArr) {
         this.lisArr = lisArr;
     }
 
@@ -151,6 +161,14 @@ public class SearchData {
         this.inputs = inputs;
     }
 
+    public String getNormal() {
+        return normal;
+    }
+
+    public void setNormal(String normal) {
+        this.normal = normal;
+    }
+
     public String getChief() {
         return chief;
     }
@@ -262,4 +280,44 @@ public class SearchData {
     public void setGraphInputs(Map<String, Map<String, String>> graphInputs) {
         this.graphInputs = graphInputs;
     }
+
+    public String getLisOrder() {
+        return lisOrder;
+    }
+
+    public void setLisOrder(String lisOrder) {
+        this.lisOrder = lisOrder;
+    }
+
+    public String getPacsOrder() {
+        return pacsOrder;
+    }
+
+    public void setPacsOrder(String pacsOrder) {
+        this.pacsOrder = pacsOrder;
+    }
+
+    public String getOtherOrder() {
+        return otherOrder;
+    }
+
+    public void setOtherOrder(String otherOrder) {
+        this.otherOrder = otherOrder;
+    }
+
+    public Map<String, List<Rule>> getRules() {
+        return rules;
+    }
+
+    public void setRules(Map<String, List<Rule>> rules) {
+        this.rules = rules;
+    }
+
+    public String getDiseaseName() {
+        return diseaseName;
+    }
+
+    public void setDiseaseName(String diseaseName) {
+        this.diseaseName = diseaseName;
+    }
 }

+ 1 - 1
common-service/src/main/java/org/diagbot/common/javabean/Detail.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.javabean;
+package org.diagbot.common.push.bean.neo4j;
 
 /**
  * 指标的详细信息

+ 1 - 1
common-service/src/main/java/org/diagbot/common/javabean/Drugs.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.javabean;
+package org.diagbot.common.push.bean.neo4j;
 
 import java.util.LinkedList;
 

+ 1 - 2
common-service/src/main/java/org/diagbot/common/javabean/Filnlly.java

@@ -1,8 +1,7 @@
-package org.diagbot.common.javabean;
+package org.diagbot.common.push.bean.neo4j;
 
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Map;
 
 public class Filnlly {
     private List<Indicators> adverseEvent;

+ 1 - 1
common-service/src/main/java/org/diagbot/common/javabean/FuzhenFilnlly.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.javabean;
+package org.diagbot.common.push.bean.neo4j;
 
 import java.util.ArrayList;
 import java.util.Map;

+ 1 - 1
common-service/src/main/java/org/diagbot/common/javabean/Indicators.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.javabean;
+package org.diagbot.common.push.bean.neo4j;
 
 import java.util.List;
 

+ 1 - 1
common-service/src/main/java/org/diagbot/common/javabean/MangementEvaluation.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.javabean;
+package org.diagbot.common.push.bean.neo4j;
 
 import com.alibaba.fastjson.JSONObject;
 

+ 12 - 1
common-service/src/main/java/org/diagbot/common/javabean/MedicalIndication.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.javabean;
+package org.diagbot.common.push.bean.neo4j;
 
 import java.util.List;
 
@@ -7,6 +7,8 @@ import java.util.List;
  */
 public class MedicalIndication {
     private String name;
+    // 触发推送的规则
+    private String rule;
     private List<MedicalIndicationDetail> details;
 
     public String getName() {
@@ -17,6 +19,14 @@ public class MedicalIndication {
         this.name = name;
     }
 
+    public String getRule() {
+        return rule;
+    }
+
+    public void setRule(String rule) {
+        this.rule = rule;
+    }
+
     public List<MedicalIndicationDetail> getDetails() {
         return details;
     }
@@ -24,4 +34,5 @@ public class MedicalIndication {
     public void setDetails(List<MedicalIndicationDetail> details) {
         this.details = details;
     }
+
 }

+ 2 - 2
common-service/src/main/java/org/diagbot/common/javabean/MedicalIndicationDetail.java

@@ -1,9 +1,9 @@
-package org.diagbot.common.javabean;
+package org.diagbot.common.push.bean.neo4j;
 
 import com.alibaba.fastjson.JSONObject;
 
 public class MedicalIndicationDetail {
-    private Integer type;//1-量表,2-公式,3-其他指标
+    private Integer type;//1-量表,2-公式,3-其他指标,4-危急值
     private JSONObject content;
 
     public Integer getType() {

+ 1 - 1
common-service/src/main/java/org/diagbot/common/javabean/Medicition.java

@@ -1,4 +1,4 @@
-package org.diagbot.common.javabean;
+package org.diagbot.common.push.bean.neo4j;
 
 /**
  * 药

+ 190 - 0
common-push/src/main/java/org/diagbot/common/push/cache/ApplicationCacheUtil.java

@@ -0,0 +1,190 @@
+package org.diagbot.common.push.cache;
+
+import org.diagbot.common.push.bean.ResultMappingFilter;
+import org.diagbot.common.push.bean.Rule;
+import org.diagbot.common.push.bean.RuleApp;
+import org.diagbot.nlp.participle.cfg.Configuration;
+import org.diagbot.nlp.participle.cfg.DefaultConfig;
+import org.diagbot.nlp.util.NlpCache;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+public class ApplicationCacheUtil {
+
+    //词库同义词定义
+    public static Map<String, Map<String, String>> standard_info_synonym_map = null;
+    //诊断科室衍射
+    public static Map<String, String> doc_result_mapping_diag_map = null;
+    //特征性别 年龄过滤等
+    public static Map<String, Map<String, ResultMappingFilter>> doc_result_mapping_filter_map = null;
+    // 规则
+    public static Map<String, List<Rule>> kl_rule_filter_map = null;
+    //危险值提醒
+    public static Map<String, RuleApp> kl_rule_app_filter_map = null;
+    //pacs关系抽取过滤
+    public static Map<String, Map<String, String>> kl_diagnose_detail_filter_map = null;
+
+    public static Map<String, Map<String, String>> getStandard_info_synonym_map() {
+        if (standard_info_synonym_map == null) {
+            standard_info_synonym_map = NlpCache.getStandard_info_synonym_map();
+        }
+        return standard_info_synonym_map;
+    }
+
+    public static Map<String, String> getDoc_result_mapping_diag_map() {
+        if (doc_result_mapping_diag_map == null) {
+            createDoc_result_mapping_diag_map();
+        }
+        return doc_result_mapping_diag_map;
+    }
+
+    public static Map<String, String> createDoc_result_mapping_diag_map() {
+        Configuration configuration = new DefaultConfig();
+        doc_result_mapping_diag_map = configuration.loadMapDict("bigdata_diag_2_dept.dict");
+        return doc_result_mapping_diag_map;
+    }
+
+    public static Map<String, Map<String, ResultMappingFilter>> getDoc_result_mapping_filter_map() {
+        if (doc_result_mapping_filter_map == null) {
+            createDoc_result_mapping_filter_map();
+        }
+        return doc_result_mapping_filter_map;
+    }
+
+    public static Map<String, Map<String, ResultMappingFilter>> createDoc_result_mapping_filter_map() {
+        Configuration configuration = new DefaultConfig();
+        List<String> fileContents = configuration.readFileContents("bigdata_sex_age_filter.dict");
+        String[] line_string;
+        List<ResultMappingFilter> resultMappingFilters = new ArrayList<>();
+        try {
+            for (int i = 0; i < fileContents.size(); i++) {
+                line_string = org.apache.commons.lang3.StringUtils.split(fileContents.get(i), "\\|");
+                if (line_string.length == 5) {
+                    ResultMappingFilter resultMappingFilter = new ResultMappingFilter();
+                    resultMappingFilter.setFeatureName(line_string[0]);
+                    resultMappingFilter.setFeatureType(line_string[1]);
+                    resultMappingFilter.setSex(line_string[2]);
+                    resultMappingFilter.setAgeStart(Integer.parseInt(line_string[3]));
+                    resultMappingFilter.setAgeEnd(Integer.parseInt(line_string[4]));
+                    resultMappingFilters.add(resultMappingFilter);
+                }
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        doc_result_mapping_filter_map = new HashMap<>();
+        Map<String, ResultMappingFilter> filterMap = null;
+        for (ResultMappingFilter resultMappingFilter : resultMappingFilters) {
+            filterMap = doc_result_mapping_filter_map.get(resultMappingFilter.getFeatureType());
+            if (filterMap == null) {
+                filterMap = new HashMap<>();
+            }
+            filterMap.put(resultMappingFilter.getFeatureName(), resultMappingFilter);
+            doc_result_mapping_filter_map.put(resultMappingFilter.getFeatureType(), filterMap);
+        }
+        return doc_result_mapping_filter_map;
+    }
+
+
+    public static Map<String, List<Rule>> getKl_rule_filter_map() {
+        if (kl_rule_filter_map == null) {
+            create_kl_rule_filter_map();
+        }
+        return kl_rule_filter_map;
+    }
+
+    public static void create_kl_rule_filter_map() {
+        kl_rule_filter_map = new HashMap<>();
+
+        Configuration configuration = new DefaultConfig();
+        List<String> fileContents = configuration.readFileContents("bigdata_rule_filter.dict");
+
+        List<Rule> rules = null;
+        for (String line : fileContents) {
+            String[] content = line.split("\\|", -1);
+            Rule rule = new Rule();
+            if (content.length == 12) {
+                rule.setId(content[0] == null ? "" : content[0]);
+                rule.setPub_name(content[1] == null ? "" : content[1]);
+                rule.setMin_operator(content[2] == null ? "" : content[2]);
+                rule.setMin_value(content[3] == null ? "" : content[3]);
+                rule.setMin_unit(content[4] == null ? "" : content[4]);
+                rule.setMax_operator(content[5] == null ? "" : content[5]);
+                rule.setMax_value(content[6] == null ? "" : content[6]);
+                rule.setMax_unit(content[7] == null ? "" : content[7]);
+                rule.setEq_operator(content[8] == null ? "" : content[8]);
+                rule.setEq_value(content[9] == null ? "" : content[9]);
+                rule.setEq_unit(content[10] == null ? "" : content[10]);
+                rule.setRemind(content[11] == null ? "" : content[11]);
+                if (kl_rule_filter_map.get(rule.getPub_name()) == null) {
+                    rules = new ArrayList<>();
+                } else {
+                    rules = kl_rule_filter_map.get(rule.getPub_name());
+                }
+                rules.add(rule);
+                kl_rule_filter_map.put(rule.getPub_name(), rules);
+            }
+        }
+    }
+
+    public static Map<String, RuleApp> getKl_rule_app_filter_map() {
+        if (kl_rule_app_filter_map == null) {
+            create_kl_rule_app_filter_map();
+        }
+        return kl_rule_app_filter_map;
+    }
+
+    public static void create_kl_rule_app_filter_map() {
+        kl_rule_app_filter_map = new HashMap<>();
+
+        Configuration configuration = new DefaultConfig();
+        List<String> fileContents = configuration.readFileContents("bigdata_rule_app_filter.dict");
+
+        for (String line : fileContents) {
+            String[] content = line.split("\\|", -1);
+            RuleApp ruleApp = new RuleApp();
+            if (content.length == 4) {
+                ruleApp.setId(content[0] == null ? "" : content[0]);
+                ruleApp.setRuleIds(content[1] == null ? "" : content[1]);
+                ruleApp.setTypeId(content[2] == null ? "" : content[2]);
+                ruleApp.setRemind(content[3] == null ? "" : content[3]);
+                kl_rule_app_filter_map.put(ruleApp.getId(), ruleApp);
+            }
+        }
+    }
+
+    public static Map<String, Map<String, String>> getKl_diagnose_detail_filter_map() {
+        if (kl_diagnose_detail_filter_map == null) {
+            create_kl_diagnose_detail_filter_map();
+        }
+        return kl_diagnose_detail_filter_map;
+    }
+
+    public static void create_kl_diagnose_detail_filter_map() {
+        kl_diagnose_detail_filter_map = new HashMap<>();
+        Map<String, String> diagnoseDetailRelationMap = new HashMap<>();
+        Set<String> diagnoseDetailRelation = new HashSet<>();
+        Configuration configuration = new DefaultConfig();
+        List<String> fileContents = configuration.readFileContents("bigdata_diagnose_detail_filter.dict");
+        for (String line : fileContents) {
+            String[] content = line.split("\\|", -1);
+            String[] relations = content[1].split("、");
+            for (String relation : relations) {
+                if (diagnoseDetailRelation.add(relation)) {
+                    if (kl_diagnose_detail_filter_map.get(content[0]) == null) {
+                        diagnoseDetailRelationMap.put(relation, relation);
+                        kl_diagnose_detail_filter_map.put(content[0], diagnoseDetailRelationMap);
+                    } else {
+                        kl_diagnose_detail_filter_map.get(content[0]).put(relation, relation);
+                    }
+                }
+            }
+        }
+    }
+}

+ 91 - 9
common-push/src/main/java/org/diagbot/common/push/cache/CacheFileManager.java

@@ -5,6 +5,7 @@ import org.diagbot.pub.utils.PropertiesUtil;
 import org.diagbot.pub.utils.security.EncrypDES;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.springframework.util.StringUtils;
 
 import java.io.File;
 import java.io.FileWriter;
@@ -29,8 +30,8 @@ public class CacheFileManager {
 
     public static void main(String[] args) {
         CacheFileManager cacheFileManager = new CacheFileManager();
-        String p = cacheFileManager.getClass().getClassLoader().getResource("").getPath();
-        p = "d:/cache_file/";
+        PropertiesUtil propertiesUtil = new PropertiesUtil("nlp.properties");
+        String p = propertiesUtil.getProperty("cache.file.dir");
         File file = new File(p);
         if (!file.exists()) {
             file.mkdirs();
@@ -72,7 +73,7 @@ public class CacheFileManager {
         try {
             EncrypDES encrypDES = new EncrypDES();
             //所有词典库 不能用concat_group 大小写不区分
-            String sql = "select l_1.name l_1_name, l_1.type_id type_id, l_2.name l_2_name, l_1.concept_id from kl_library_info l_1\n" +
+            String sql = "select l_1.name l_1_name, l_1.type_id type_id, l_2.name l_2_name, kc.lib_name from kl_library_info l_1\n" +
                     "                    left join kl_library_info l_2 on l_1.concept_id = l_2.concept_id and l_2.is_concept = 1\n" +
                     "left join kl_concept kc on l_1.concept_id = kc.id\n" +
                     "where kc.is_deleted = 'N' ";
@@ -163,6 +164,18 @@ public class CacheFileManager {
 
             fw = new FileWriter(path + "classify.dict");
             fw.close();
+
+            sql = "select name, type from kl_library_info_pacs order by name";
+            st = conn.createStatement();
+            rs = st.executeQuery(sql);
+            fw = new FileWriter(path + "pacs-tc.dict");
+            while (rs.next()) {
+                r1 = rs.getString(1);
+                r2 = rs.getString(2);
+                fw.write(encrypDES.encrytor(r1 + "|9|"+ r2 + "|" + r1));
+                fw.write("\n");
+            }
+            fw.close();
         } catch (IOException ioe) {
             ioe.printStackTrace();
         } catch (SQLException sqle) {
@@ -190,11 +203,11 @@ public class CacheFileManager {
             st = conn.createStatement();
             rs = st.executeQuery(sql);
             FileWriter fw = new FileWriter(path + "graph_diag_classify.dict");
-            String  r2 ,r3;
+            String r2, r3;
             while (rs.next()) {
                 r2 = rs.getString(1);//疾病名称
                 r3 = rs.getString(2);//疾病类别
-                fw.write(encrypDES.encrytor(r2+ "|" + r3));
+                fw.write(encrypDES.encrytor(r2 + "|" + r3));
                 fw.write("\n");
             }
             fw.close();
@@ -212,7 +225,7 @@ public class CacheFileManager {
                 r2 = rs.getString(3);//sexType 1:男 2:女 3:都可以
                 r3 = rs.getString(4);//min_age
                 r4 = rs.getString(5);//max_age
-                fw.write(encrypDES.encrytor(r1 + "|" + r2 + "|" + r3+ "|" + r4));
+                fw.write(encrypDES.encrytor(r1 + "|" + r2 + "|" + r3 + "|" + r4));
                 fw.write("\n");
             }
             fw.close();
@@ -257,7 +270,7 @@ public class CacheFileManager {
                 r1 = rs.getString(1);
                 r2 = rs.getString(2);
                 r3 = rs.getString(3);
-                fw.write(encrypDES.encrytor(r1+ "|" + r2+ "|" + r3));
+                fw.write(encrypDES.encrytor(r1 + "|" + r2 + "|" + r3));
                 fw.write("\n");
             }
             fw.close();
@@ -328,18 +341,87 @@ public class CacheFileManager {
                 fw.write("\n");
             }
             fw.close();
+
+            //规则过滤信息
+            sql = "SELECT id, pub_name, min_operator, min_value, min_unit, max_operator, max_value, " +
+                    "max_unit, eq_operator, eq_value, eq_unit, remind FROM kl_rule_pub";
+            st = conn.createStatement();
+            rs = st.executeQuery(sql);
+            fw = new FileWriter(path + "bigdata_rule_filter.dict");
+            String r6, r7, r8, r9, r10, r11, r12;
+            while (rs.next()) {
+                r1 = String.valueOf(rs.getInt(1));
+                r2 = rs.getString(2);
+                r3 = rs.getString(3);
+                r4 = rs.getString(4);
+                r5 = rs.getString(5);
+                r6 = rs.getString(6);
+                r7 = rs.getString(7);
+                r8 = rs.getString(8);
+                r9 = rs.getString(9);
+                r10 = rs.getString(10);
+                r11 = rs.getString(11);
+                r12 = rs.getString(12);
+                r1 = StringUtils.isEmpty(r1) ? "" : r1;
+                r2 = StringUtils.isEmpty(r2) ? "" : r2;
+                r3 = StringUtils.isEmpty(r3) ? "" : r3;
+                r4 = StringUtils.isEmpty(r4) ? "" : r4;
+                r5 = StringUtils.isEmpty(r5) ? "" : r5;
+                r6 = StringUtils.isEmpty(r6) ? "" : r6;
+                r7 = StringUtils.isEmpty(r7) ? "" : r7;
+                r8 = StringUtils.isEmpty(r8) ? "" : r8;
+                r9 = StringUtils.isEmpty(r9) ? "" : r9;
+                r10 = StringUtils.isEmpty(r10) ? "" : r10;
+                r11 = StringUtils.isEmpty(r11) ? "" : r11;
+                r12 = StringUtils.isEmpty(r12) ? "" : r12;
+                fw.write(encrypDES.encrytor(r1 + "|" + r2 + "|" + r3 + "|" + r4 + "|" + r5
+                        + "|" + r6 + "|" + r7 + "|" + r8 + "|" + r9 + "|" + r10 + "|" + r11
+                        + "|" + r12));
+                fw.write("\n");
+            }
+            fw.close();
+
+            sql = "SELECT id, rule_id, type_id, remind FROM kl_rule_app";
+            st = conn.createStatement();
+            rs = st.executeQuery(sql);
+            fw = new FileWriter(path + "bigdata_rule_app_filter.dict");
+            while (rs.next()) {
+                r1 = String.valueOf(rs.getInt(1));
+                r2 = rs.getString(2);
+                r3 = rs.getString(3);
+                r4 = rs.getString(4);
+                r1 = StringUtils.isEmpty(r1) ? "" : r1;
+                r2 = StringUtils.isEmpty(r2) ? "" : r2;
+                r3 = StringUtils.isEmpty(r3) ? "" : r3;
+                r4 = StringUtils.isEmpty(r4) ? "" : r4;
+                fw.write(encrypDES.encrytor(r1 + "|" + r2 + "|" + r3 + "|" + r4));
+                fw.write("\n");
+            }
+            fw.close();
+
+            sql = "SELECT type,relation FROM `kl_diagnose_detail` WHERE type = 4 AND LENGTH(relation) > 0 GROUP BY relation";
+            st = conn.createStatement();
+            rs = st.executeQuery(sql);
+            fw = new FileWriter(path + "bigdata_diagnose_detail_filter.dict");
+            while (rs.next()) {
+                r1 = String.valueOf(rs.getInt(1));
+                r2 = rs.getString(2);
+                fw.write(encrypDES.encrytor(r1+ "|" + r2));
+                fw.write("\n");
+            }
+            fw.close();
         } catch (IOException ioe) {
             ioe.printStackTrace();
         } catch (SQLException sqle) {
             sqle.printStackTrace();
-        }  catch (Exception e) {
+        } catch (Exception e) {
             e.printStackTrace();
         } finally {
             nlpJdbc.close(rs, st, conn);
         }
     }
 
-    private List<Map.Entry<String, String>> rsToMap(ResultSet rs, boolean isJoin) throws SQLException{
+    private List<Map.Entry<String, String>> rsToMap(ResultSet rs, boolean isJoin) throws SQLException {
         String r1 = "";
         String r2 = "";
         Map<String, String> libraryMap = new HashMap<>(10);

+ 1 - 1
graph/src/main/java/org/diagbot/graph/util/CacheUtil.java

@@ -1,4 +1,4 @@
-package org.diagbot.graph.util;
+package org.diagbot.common.push.cache;
 
 import org.apache.commons.lang3.StringUtils;
 import org.diagbot.nlp.participle.cfg.Configuration;

+ 96 - 7
common-push/src/main/java/org/diagbot/common/push/filter/ClassifyDiag.java

@@ -2,9 +2,10 @@ package org.diagbot.common.push.filter;
 
 import com.alibaba.fastjson.JSONObject;
 import org.apache.commons.lang3.StringUtils;
-import org.diagbot.common.work.FeatureRate;
-import org.diagbot.common.work.ResponseData;
-import org.diagbot.graph.util.CacheUtil;
+import org.diagbot.common.push.bean.FeatureRate;
+import org.diagbot.common.push.bean.ResponseData;
+import org.diagbot.common.push.cache.CacheUtil;
+import org.diagbot.nlp.relation.module.Lis;
 
 import java.text.DecimalFormat;
 import java.util.*;
@@ -55,7 +56,7 @@ public class ClassifyDiag {
      * @param graphFeatures 过滤前的诊断结果
      * @return 返回过滤后的诊断结果
      */
-    public List<FeatureRate> filterDiag(ResponseData graphResponseData,List<FeatureRate> graphFeatures){
+    public List<FeatureRate> filterDiag(ResponseData graphResponseData, List<FeatureRate> graphFeatures){
         //根据诊断依据规则过滤掉的诊断列表
         List<String> excludeDiag = graphResponseData.getExcludeDiag();
         //将需要排除的诊断从列表中删除
@@ -81,6 +82,7 @@ public class ClassifyDiag {
     public List<FeatureRate> diagClassify(List<FeatureRate> updateFeatures){
         List<FeatureRate> finalDiagList = new LinkedList<>();//最终返回
         List<String> highDiagList = new LinkedList<>();//警惕集合
+        List<String> diffDiagList = new LinkedList<>();//鉴别诊断集合
         List<String> queDiagList = new LinkedList<>();//确诊集合
         List<String> bigDiagList = new LinkedList<>();//可能诊断集合
         if(updateFeatures != null && updateFeatures.size()>0){
@@ -89,13 +91,21 @@ public class ClassifyDiag {
                 String desc = featureRate.getDesc();
                 Map<String,Object> d = new HashMap<>();
                 if(desc != null){
-                    JSONObject jsonObject = JSONObject.parseObject(desc);
+                   /* JSONObject jsonObject = JSONObject.parseObject(desc);
                     d = jsonObject;
                     if(d.keySet().size() == 1 && "警惕".equals(d.keySet().toArray()[0])){
                         highDiagList.add(featureName);
+                    }else if(d.keySet().size() == 1 && ("鉴别诊断".equals(d.keySet().toArray()[0])
+                            || "页面急诊".equals(d.keySet().toArray()[0])) || "急诊".equals(d.keySet().toArray()[0])){
+                        diffDiagList.add(featureName);
                     }else {
                         queDiagList.add(featureName);
-                    }
+                    }*/
+                   if(desc.contains("确诊") || desc.contains("拟诊")){
+                       queDiagList.add(featureName);
+                   }else {
+                       highDiagList.add(featureName);
+                   }
                 }else {
                     bigDiagList.add(featureName);
                 }
@@ -112,6 +122,17 @@ public class ClassifyDiag {
                 }
             }
         }
+        //再把鉴别诊断加进去
+        if(diffDiagList.size()>0){
+            for(int j =0;j<updateFeatures.size();j++){
+                FeatureRate featureRate = updateFeatures.get(j);
+                String featureName = featureRate.getFeatureName();
+                int i = diffDiagList.indexOf(featureName);
+                if(i >= 0){
+                    finalDiagList.add(featureRate);
+                }
+            }
+        }
         /**
          * 这里处理一下可能诊断剔除的功能
          * 如果图谱推出的诊断和可能诊断有层级关系,就把对应的可能诊断剔除
@@ -121,12 +142,16 @@ public class ClassifyDiag {
         System.out.println("图谱归一前数据 :"+queDiagList);
         if(queDiagList != null && queDiagList.size()>0){
             //图谱归一 ,图谱sign =0,大数据sign = 1
-            queSet = this.diagProcess(queDiagList,0);
+            List<String> que = this.processQue(updateFeatures, queDiagList);
+            queSet = this.diagProcess(que,0);
             System.out.println("图谱归一后的数据    :"+queSet);
             if(queSet != null && queSet.size()>0){
                 for (String queDis:queSet) {
                     if(queDiagList.indexOf(queDis)>=0){ //可以找到,就取出来,用原来的
                         FeatureRate feature = this.getFeature(updateFeatures, queDis);
+                        if(feature.getDesc().contains("拟诊")){
+                            feature.setDesc(feature.getDesc().replace("拟诊","确诊"));
+                        }
                         feature.setExtraProperty(diagDepartCache.get(queDis));
                         finalDiagList.add(feature);
                     }else {
@@ -223,6 +248,13 @@ public class ClassifyDiag {
                 String s = diagClassifyCache.get(dis);
                 if(s != null){
                     queAll.add(s);
+                    List<Object> key = this.getKey(diagClassifyCache, s);
+                    if(key.size()>0){
+                        for (Object o:key
+                             ) {
+                            queAll.add(o.toString());
+                        }
+                    }
                 }
                 queAll.add(dis);
                 List<Object> key = this.getKey(diagClassifyCache, dis);
@@ -520,5 +552,62 @@ public class ClassifyDiag {
         }
         return arrayList;
     }
+    //归一有确诊的诊断
+    public  List<String> processQue(List<FeatureRate> updateFeatures,List<String>queList){
+        List<String> finallyQue = new LinkedList<>();
+        List<String> que = new ArrayList<>();
+        List<String> ni = new ArrayList<>();
+        for (String qd:queList) {
+            for (FeatureRate f:updateFeatures) {
+                if("neo4j".equals(f.getSource()) && qd.equals(f.getFeatureName())){
+                    if(f.getDesc().contains("确诊")){
+                        que.add(qd);
+                    }else if(f.getDesc().contains("拟诊")) {
+                        ni.add(qd);
+                    }
+                }
+            }
+        }
+        if(que != null && que.size()>0){
+            for (String q:que) {
+                Set<String> classifySet = new HashSet<>();
+                String s = diagClassifyCache.get(q);
+                if(StringUtils.isNotEmpty(s)){
+                    classifySet.add(s);
+                    List<Object> key = this.getKey(diagClassifyCache, s);
+                    if(key != null && key.size()>0){
+                        for (Object o:key) {
+                            classifySet.add(o.toString());
+                            List<Object> key1 = this.getKey(diagClassifyCache, o.toString());
+                            if(key1 != null && key1.size()>0){
+                                for (Object f:key1
+                                     ) {
+                                    classifySet.add(f.toString());
+                                }
+                            }
+                        }
+                    }
+                }
+                List<Object> key = this.getKey(diagClassifyCache, q);
+                if(key != null && key.size()>0){
+                    for (Object o:key) {
+                        classifySet.add(o.toString());
+                    }
+                }
+                if(classifySet != null && classifySet.size()>0){
+                    for (String sq:classifySet
+                         ) {
+                        if(ni.indexOf(sq) >= 0){
+                            ni.remove(sq);
+                        }
+                    }
+                }
+            }
+        }
+        finallyQue.addAll(que);
+        finallyQue.addAll(ni);
+        return finallyQue;
+
+    }
 
 }

+ 85 - 0
common-push/src/main/java/org/diagbot/common/push/filter/pretreat/Pretreatment.java

@@ -0,0 +1,85 @@
+package org.diagbot.common.push.filter.pretreat;
+
+import org.diagbot.common.push.bean.PreResult;
+import org.diagbot.nlp.participle.ParticipleUtil;
+import org.diagbot.nlp.participle.word.Lexeme;
+import org.diagbot.nlp.participle.word.LexemePath;
+import org.diagbot.nlp.util.NegativeEnum;
+import org.diagbot.nlp.util.NlpUtil;
+import org.springframework.util.StringUtils;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Created by louhr on 2019/8/31.
+ */
+public abstract class Pretreatment {
+    protected NegativeEnum[] nees_time_and_unit = new NegativeEnum[]{NegativeEnum.EVENT_TIME, NegativeEnum.UNIT};
+
+    protected int cursor = 0;
+
+    protected int max_back_search = 3;
+
+    public abstract List<PreResult> analyze(String content) throws java.io.IOException;
+
+    abstract PreResult createPreResult(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index);
+
+    abstract String findBodyValue(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index);
+
+    protected List<PreResult> analyzeDefault(String content) throws java.io.IOException{
+        LexemePath<Lexeme> lexemes = ParticipleUtil.participle(content);
+
+        List<PreResult> preResultList = new ArrayList<>();
+        for (int i = 0; i < lexemes.size(); i++) {
+            Lexeme l = lexemes.get(i);
+            if (NlpUtil.isFeature(l.getProperty(), nees_time_and_unit)) {
+                PreResult result = data2Object(lexemes, l, i, l.getProperty());
+                if (result != null) {
+                    preResultList.add(result);
+                }
+            }
+        }
+        return preResultList;
+    }
+
+    protected PreResult data2Object(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index, String property) {
+        if (index < 2) {
+            return null;
+        }
+        return createPreResult(lexemes, lexeme, index);
+    }
+
+    public PreResult createDefaultPreResult(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
+        double value = findNumberValue(lexemes, lexeme, index);
+        if (value == -1) return null;
+        //继续往前找本体
+        String text = findBodyValue(lexemes, lexeme, index);
+        if (StringUtils.isEmpty(text)) {
+            return null;
+        }
+        PreResult result = new PreResult();
+        result.setValue(String.valueOf(value));
+        result.setUnits(lexeme.getText());
+        result.setDetailName(text);
+        result.setUniqueName(text);
+        return result;
+    }
+
+    protected double findNumberValue(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
+        if (index < 1) return -1;
+        cursor = index - 1;
+        Lexeme leftLexeme = lexemes.get(cursor);
+        if ("×".equals(leftLexeme.getText())) {
+            if  (cursor <= 0) return -1;
+            cursor--;
+            leftLexeme = lexemes.get(cursor);
+        }
+        if (NlpUtil.isNumberString(leftLexeme)) {
+            return NlpUtil.numberText2value(leftLexeme);
+        }
+        return -1;
+    }
+
+
+}

+ 39 - 0
common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentDiag.java

@@ -0,0 +1,39 @@
+package org.diagbot.common.push.filter.pretreat;
+
+import org.diagbot.common.push.bean.PreResult;
+import org.diagbot.nlp.participle.ParticipleUtil;
+import org.diagbot.nlp.participle.word.Lexeme;
+import org.diagbot.nlp.participle.word.LexemePath;
+import org.diagbot.nlp.util.NegativeEnum;
+import org.diagbot.nlp.util.NlpUtil;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class PretreatmentDiag extends Pretreatment {
+    protected NegativeEnum[] nees_disease = new NegativeEnum[]{NegativeEnum.DISEASE};
+
+    public List<PreResult> analyze(String content) throws java.io.IOException{
+        List<PreResult> preResults = new ArrayList<>();
+        LexemePath<Lexeme> lexemes = ParticipleUtil.participle(content);
+        for (int i = 0; i < lexemes.size(); i++) {
+            Lexeme l = lexemes.get(i);
+            if (NlpUtil.isFeature(l.getProperty(), nees_disease)) {
+                PreResult result = new PreResult();
+                result.setUniqueName("诊断--");
+                result.setDetailName("诊断--");
+                result.setValue(NlpUtil.concept(l, NegativeEnum.DISEASE));
+                preResults.add(result);
+            }
+        }
+        return preResults;
+    }
+
+    public PreResult createPreResult(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
+        return null;
+    }
+
+    public String findBodyValue(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
+        return null;
+    }
+}

+ 93 - 0
common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentLis.java

@@ -0,0 +1,93 @@
+package org.diagbot.common.push.filter.pretreat;
+
+import org.apache.commons.lang3.StringUtils;
+import org.diagbot.common.push.bean.PreResult;
+import org.diagbot.nlp.participle.ParticipleUtil;
+import org.diagbot.nlp.participle.word.Lexeme;
+import org.diagbot.nlp.participle.word.LexemePath;
+import org.diagbot.nlp.util.NegativeEnum;
+import org.diagbot.nlp.util.NlpUtil;
+
+import java.util.List;
+
+public class PretreatmentLis extends Pretreatment {
+    private String join_symbols = ";:;:";
+    protected NegativeEnum[] lisResult = new NegativeEnum[]{NegativeEnum.LIS_RESULT};
+
+    public List<PreResult> analyze(String content) throws java.io.IOException {
+        List<PreResult> preResults = super.analyzeDefault(content);
+        LexemePath<Lexeme> lexemes = ParticipleUtil.participle(content);
+        for (int i = 0; i < lexemes.size(); i++) {
+            Lexeme l = lexemes.get(i);
+            if (NlpUtil.isFeature(l.getProperty(), lisResult)) {
+                cursor = i;
+                PreResult result = new PreResult();
+                result = findPreResultPub(lexemes, result);
+                if (result != null) {
+                    result.setOtherValue(l.getText());
+                    preResults.add(result);
+                }
+            }
+        }
+        return preResults;
+    }
+
+    public PreResult createPreResult(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
+        PreResult result = new PreResult();
+        double value = findNumberValue(lexemes, lexeme, index);
+        if (value == -1) return null;
+        result.setValue(String.valueOf(value));
+        result.setUnits(lexeme.getText());
+        return findPreResultPub(lexemes, result);
+    }
+
+    public PreResult findPreResultPub(LexemePath<Lexeme> lexemes, PreResult result) {
+        //继续往前找化验明细项
+        if (cursor > 0) cursor--;
+        Lexeme leftLexeme = lexemes.get(cursor);
+        if (join_symbols.contains(leftLexeme.getText())) {
+            if (cursor > 0) {
+                cursor--;
+                leftLexeme = lexemes.get(cursor);
+            } else {
+                return null;
+            }
+        }
+        if (NlpUtil.isFeature(leftLexeme.getProperty(), new NegativeEnum[]{NegativeEnum.LIS_NAME})) {
+            result.setDetailName(NlpUtil.concept(leftLexeme, NegativeEnum.LIS_NAME));
+        } else if (NlpUtil.isFeature(leftLexeme.getProperty(), new NegativeEnum[]{NegativeEnum.PUB_NAME})) {
+            result.setUniqueName(NlpUtil.concept(leftLexeme, NegativeEnum.PUB_NAME));
+        } else {
+            return null;
+        }
+
+        //查找化验套餐
+        int position = cursor - 1;
+        while (position > -1) {
+            leftLexeme = lexemes.get(position);
+            if (NlpUtil.isFeature(leftLexeme.getProperty(), new NegativeEnum[]{NegativeEnum.LIS_TYPE})) {
+                result.setName(NlpUtil.concept(leftLexeme, NegativeEnum.LIS_TYPE));
+                break;
+            }
+            position--;
+        }
+
+        if (StringUtils.isEmpty(result.getUniqueName())){
+            if (StringUtils.isNotEmpty(result.getDetailName()) && StringUtils.isNotEmpty(result.getName())){
+                result.setUniqueName(result.getName() + "--" + result.getDetailName());
+            } else if (StringUtils.isNotEmpty(result.getDetailName())){
+                result.setUniqueName(result.getDetailName());
+            } else if (StringUtils.isNotEmpty(result.getName())){
+                result.setUniqueName(result.getName());
+            } else {
+                return null;
+            }
+        }
+
+        return result;
+    }
+
+    public String findBodyValue(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
+        return null;
+    }
+}

+ 43 - 0
common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentMakeList.java

@@ -0,0 +1,43 @@
+package org.diagbot.common.push.filter.pretreat;
+
+import org.diagbot.common.push.bean.PreResult;
+import org.diagbot.nlp.participle.ParticipleUtil;
+import org.diagbot.nlp.participle.word.Lexeme;
+import org.diagbot.nlp.participle.word.LexemePath;
+import org.diagbot.nlp.util.NegativeEnum;
+import org.diagbot.nlp.util.NlpUtil;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class PretreatmentMakeList extends Pretreatment {
+    protected NegativeEnum[] nees_pacs_name = new NegativeEnum[]{NegativeEnum.PACS_NAME};
+    protected NegativeEnum[] nees_lis_type = new NegativeEnum[]{NegativeEnum.LIS_TYPE};
+
+    public List<PreResult> analyze(String content) throws java.io.IOException{
+        List<PreResult> preResults = new ArrayList<>();
+        LexemePath<Lexeme> lexemes = ParticipleUtil.participle(content);
+        for (int i = 0; i < lexemes.size(); i++) {
+            Lexeme l = lexemes.get(i);
+            PreResult result = new PreResult();
+            result.setUniqueName("开单--");
+            result.setDetailName("开单--");
+            if (NlpUtil.isFeature(l.getProperty(), nees_pacs_name)) {
+                result.setValue(NlpUtil.concept(l, NegativeEnum.PACS_NAME));
+                preResults.add(result);
+            } else if (NlpUtil.isFeature(l.getProperty(), nees_lis_type)) {
+                result.setValue(NlpUtil.concept(l, NegativeEnum.LIS_TYPE));
+                preResults.add(result);
+            }
+        }
+        return preResults;
+    }
+
+    public PreResult createPreResult(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
+        return null;
+    }
+
+    public String findBodyValue(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
+        return null;
+    }
+}

+ 46 - 0
common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentNormal.java

@@ -0,0 +1,46 @@
+package org.diagbot.common.push.filter.pretreat;
+
+import org.diagbot.common.push.bean.PreResult;
+import org.diagbot.nlp.participle.ParticipleUtil;
+import org.diagbot.nlp.participle.word.Lexeme;
+import org.diagbot.nlp.participle.word.LexemePath;
+import org.diagbot.nlp.util.NegativeEnum;
+import org.diagbot.nlp.util.NlpUtil;
+
+import java.util.List;
+
+public class PretreatmentNormal extends Pretreatment {
+    public List<PreResult> analyze(String content) throws java.io.IOException{
+        List<PreResult> preResults = super.analyzeDefault(content);
+
+        LexemePath<Lexeme> lexemes = ParticipleUtil.participle(content);
+        for (int i = 0; i < lexemes.size(); i++) {
+            Lexeme l = lexemes.get(i);
+            if (l.getText().equals("性别") && i < lexemes.size() - 1) {
+                Lexeme afterLexeme = lexemes.get(i + 1);
+                if ("男性".equals(afterLexeme.getText()) || "女性".equals(afterLexeme.getText())) {
+                    PreResult result = new PreResult();
+                    result.setUniqueName(l.getText()+"--");
+                    result.setDetailName(l.getText()+"--");
+                    result.setValue(afterLexeme.getText());
+                    preResults.add(result);
+                    return preResults;
+                }
+            }
+        }
+        return preResults;
+    }
+
+    public PreResult createPreResult(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
+        return super.createDefaultPreResult(lexemes, lexeme, index);
+    }
+
+    public String findBodyValue(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
+        if (cursor > 0) cursor--;
+        Lexeme leftLexeme = lexemes.get(cursor);
+        if (leftLexeme.getText().equals("年龄")) {
+            return leftLexeme.getText()+"--";
+        }
+        return null;
+    }
+}

+ 45 - 0
common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentOther.java

@@ -0,0 +1,45 @@
+package org.diagbot.common.push.filter.pretreat;
+
+import org.diagbot.common.push.bean.PreResult;
+import org.diagbot.nlp.participle.ParticipleUtil;
+import org.diagbot.nlp.participle.word.Lexeme;
+import org.diagbot.nlp.participle.word.LexemePath;
+import org.diagbot.nlp.util.NegativeEnum;
+import org.diagbot.nlp.util.NlpUtil;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class PretreatmentOther extends Pretreatment {
+    protected NegativeEnum[] nees_disease = new NegativeEnum[]{NegativeEnum.DISEASE};
+    protected NegativeEnum[] nees_operation = new NegativeEnum[]{NegativeEnum.OPERATION};
+
+    public List<PreResult> analyze(String content) throws java.io.IOException{
+        List<PreResult> preResults = new ArrayList<>();
+        LexemePath<Lexeme> lexemes = ParticipleUtil.participle(content);
+        for (int i = 0; i < lexemes.size(); i++) {
+            Lexeme l = lexemes.get(i);
+            PreResult result = new PreResult();
+            if (NlpUtil.isFeature(l.getProperty(), nees_disease)) {
+                result.setUniqueName("诊断--");
+                result.setDetailName("诊断--");
+                result.setValue(NlpUtil.concept(l, NegativeEnum.DISEASE));
+                preResults.add(result);
+            } else if (NlpUtil.isFeature(l.getProperty(), nees_operation)) {
+                result.setUniqueName("手术--");
+                result.setDetailName("手术--");
+                result.setValue(NlpUtil.concept(l, NegativeEnum.OPERATION));
+                preResults.add(result);
+            }
+        }
+        return preResults;
+    }
+
+    public PreResult createPreResult(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
+        return null;
+    }
+
+    public String findBodyValue(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
+        return null;
+    }
+}

+ 128 - 0
common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentPacs.java

@@ -0,0 +1,128 @@
+package org.diagbot.common.push.filter.pretreat;
+
+import org.diagbot.common.push.bean.PreResult;
+import org.diagbot.nlp.participle.ParticipleUtil;
+import org.diagbot.nlp.participle.word.Lexeme;
+import org.diagbot.nlp.participle.word.LexemePath;
+import org.diagbot.nlp.util.NegativeEnum;
+import org.diagbot.nlp.util.NlpUtil;
+import org.springframework.util.StringUtils;
+
+import java.util.List;
+
+public class PretreatmentPacs extends Pretreatment {
+    protected NegativeEnum[] nees_pacs_result = new NegativeEnum[] { NegativeEnum.PACS_RESULT };
+    protected NegativeEnum[] nees_pacs_name = new NegativeEnum[] { NegativeEnum.PACS_NAME };
+    private String join_symbols = ";:;:";
+
+    public List<PreResult> analyze(String content) throws java.io.IOException {
+        List<PreResult> preResultList = super.analyzeDefault(content);
+        //pacs除了数值型需要转, 还需要对部分检查结果提取,以便做危机警示
+        LexemePath<Lexeme> lexemes = ParticipleUtil.participle(content);
+
+        Lexeme leftLexeme;
+        for (int i = 0; i < lexemes.size(); i++) {
+            Lexeme l = lexemes.get(i);
+            if (NlpUtil.isFeature(l.getProperty(), nees_pacs_result) && i > 0) {
+                int c = i - 1;
+                while (c > -1) {
+                    leftLexeme = lexemes.get(c);
+                    if (NlpUtil.isFeature(leftLexeme.getProperty(), nees_pacs_name)) {
+                        PreResult result = new PreResult();
+                        result.setValue(NlpUtil.concept(l, NegativeEnum.PACS_RESULT));
+                        result.setDetailName(NlpUtil.concept(leftLexeme, NegativeEnum.PACS_NAME));
+                        result.setUniqueName(NlpUtil.concept(leftLexeme, NegativeEnum.PACS_NAME));
+                        preResultList.add(result);
+                        break;
+                    }
+                    c--;
+                }
+            } else if (NlpUtil.isFeature(l.getProperty(), nees_time_and_unit) && i > 0) {
+                PreResult result = data2Object(lexemes, l, i, l.getProperty());
+                if (result != null) {
+                    preResultList.add(result);
+                }
+            }
+        }
+        return preResultList;
+    }
+
+    public PreResult createPreResult(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
+        double value = findNumberValue(lexemes, lexeme, index);
+        if (value == -1) {
+            return null;
+        }
+//        //继续往前找本体
+//        String text = findBodyValue(lexemes, lexeme, index);
+//        if (StringUtils.isEmpty(text)) {
+//            return null;
+//        }
+        PreResult result = new PreResult();
+        result.setValue(String.valueOf(value));
+        result.setUnits(lexeme.getText());
+        return getPreResultPub(lexemes, result);
+    }
+
+    public String findBodyValue(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
+        return null;
+    }
+
+    public PreResult getPreResultPub(LexemePath<Lexeme> lexemes, PreResult result) {
+        //继续往前找辅检明细项
+        if (cursor > 0) {
+            cursor--;
+        }
+        Lexeme leftLexeme = lexemes.get(cursor);
+        if (join_symbols.contains(leftLexeme.getText())) {
+            if (cursor > 0) {
+                cursor--;
+                leftLexeme = lexemes.get(cursor);
+            } else {
+                return null;
+            }
+        }
+        if (NlpUtil.isFeature(leftLexeme.getProperty(), new NegativeEnum[] { NegativeEnum.PACS_NAME })) {
+            result.setDetailName(NlpUtil.concept(leftLexeme, NegativeEnum.PACS_NAME));
+        } else if (NlpUtil.isFeature(leftLexeme.getProperty(), new NegativeEnum[] { NegativeEnum.PACS_NAME })) {
+            result.setUniqueName(NlpUtil.concept(leftLexeme, NegativeEnum.PACS_NAME));
+        } else {
+            return null;
+        }
+        return result;
+    }
+
+    protected double findNumberValue(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
+        if (index < 1) {
+            return -1;
+        }
+        cursor = index - 1;
+        Lexeme leftLexeme = lexemes.get(cursor);
+        if (isNumberString(leftLexeme)) {
+            String[] numbersSplit = leftLexeme.getText().split("\\*");
+            try {
+                if (numbersSplit.length == 2) {
+                    return Double.valueOf(numbersSplit[0]) * Double.valueOf(numbersSplit[1]);
+                } else if (numbersSplit.length == 3) {
+                    return Double.valueOf(numbersSplit[0]) * Double.valueOf(numbersSplit[1])
+                            * Double.valueOf(numbersSplit[2]);
+                } else {
+                    return -1;
+                }
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+        return -1;
+    }
+
+    public static boolean isNumberString(Lexeme l) {
+        if (l == null) {
+            return false;
+        }
+        if (NlpUtil.isFeature(l.getProperty(), new NegativeEnum[] { NegativeEnum.DIGITS })
+                && l.getText().indexOf("*") != -1) {
+            return true;
+        }
+        return false;
+    }
+}

+ 44 - 0
common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentSymptom.java

@@ -0,0 +1,44 @@
+package org.diagbot.common.push.filter.pretreat;
+
+import org.diagbot.common.push.bean.PreResult;
+import org.diagbot.nlp.participle.ParticipleUtil;
+import org.diagbot.nlp.participle.word.Lexeme;
+import org.diagbot.nlp.participle.word.LexemePath;
+import org.diagbot.nlp.util.NegativeEnum;
+import org.diagbot.nlp.util.NlpUtil;
+
+import java.util.List;
+
+/**
+ * Pretreatment for symptom text: runs the default pipeline, then adds one
+ * PreResult per symptom lexeme found by the participle step.
+ */
+public class PretreatmentSymptom extends Pretreatment {
+    // word properties that identify a symptom token
+    protected NegativeEnum[] nees_symptom = new NegativeEnum[]{NegativeEnum.SYMPTOM};
+
+    /**
+     * Default pretreatment plus one PreResult (unique/detail name "症状--")
+     * for every symptom lexeme in {@code content}.
+     *
+     * @param content raw symptom text
+     * @return the combined list of default and symptom-specific results
+     * @throws java.io.IOException if participle dictionary loading fails
+     */
+    public List<PreResult> analyze(String content) throws java.io.IOException {
+        List<PreResult> preResults = super.analyzeDefault(content);
+
+        LexemePath<Lexeme> lexemes = ParticipleUtil.participle(content);
+        for (int i = 0; i < lexemes.size(); i++) {
+            Lexeme l = lexemes.get(i);
+            if (NlpUtil.isFeature(l.getProperty(), nees_symptom)) {
+                // only allocate a result when the lexeme really is a symptom
+                PreResult result = new PreResult();
+                result.setUniqueName("症状--");
+                result.setDetailName("症状--");
+                result.setValue(NlpUtil.concept(l, NegativeEnum.SYMPTOM));
+                preResults.add(result);
+            }
+        }
+        return preResults;
+    }
+
+    /** Symptom extraction needs no special handling; delegate to the default builder. */
+    public PreResult createPreResult(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
+        return super.createDefaultPreResult(lexemes, lexeme, index);
+    }
+
+    /**
+     * Steps one lexeme to the left (bounded at 0) and returns its symptom concept,
+     * or null when the left neighbour is not a symptom.
+     * NOTE(review): mutates the inherited {@code cursor} field — confirm callers expect that.
+     */
+    public String findBodyValue(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
+        if (cursor > 0) cursor--;
+        Lexeme leftLexeme = lexemes.get(cursor);
+        if (NlpUtil.isFeature(leftLexeme.getProperty(), nees_symptom)) {
+            return NlpUtil.concept(leftLexeme, NegativeEnum.SYMPTOM);
+        }
+        return null;
+    }
+}

+ 57 - 0
common-push/src/main/java/org/diagbot/common/push/filter/pretreat/PretreatmentVital.java

@@ -0,0 +1,57 @@
+package org.diagbot.common.push.filter.pretreat;
+
+import org.diagbot.common.push.bean.PreResult;
+import org.diagbot.nlp.participle.ParticipleUtil;
+import org.diagbot.nlp.participle.word.Lexeme;
+import org.diagbot.nlp.participle.word.LexemePath;
+import org.diagbot.nlp.util.NegativeEnum;
+import org.diagbot.nlp.util.NlpUtil;
+
+import java.util.List;
+
+/**
+ * Pretreatment for vital-sign text: default pipeline plus one PreResult per
+ * vital-result or vital-index lexeme.
+ */
+public class PretreatmentVital extends Pretreatment {
+    // properties that mark a vital-sign result token
+    protected NegativeEnum[] nees_vital_result = new NegativeEnum[]{NegativeEnum.VITAL_RESULT};
+    // properties that mark a vital-sign index token
+    protected NegativeEnum[] nees_vital_index = new NegativeEnum[]{NegativeEnum.VITAL_INDEX};
+
+    /**
+     * Default pretreatment plus one "体征结果--" PreResult per matching lexeme.
+     * NOTE(review): index hits are also tagged "体征结果--" — confirm they should
+     * not carry a distinct name.
+     *
+     * @param content raw vital-sign text
+     * @throws java.io.IOException if participle dictionary loading fails
+     */
+    public List<PreResult> analyze(String content) throws java.io.IOException {
+        List<PreResult> preResults = super.analyzeDefault(content);
+        LexemePath<Lexeme> lexemes = ParticipleUtil.participle(content);
+        for (int i = 0; i < lexemes.size(); i++) {
+            Lexeme l = lexemes.get(i);
+            if (NlpUtil.isFeature(l.getProperty(), nees_vital_result)) {
+                preResults.add(buildVitalResult(NlpUtil.concept(l, NegativeEnum.VITAL_RESULT)));
+            } else if (NlpUtil.isFeature(l.getProperty(), nees_vital_index)) {
+                preResults.add(buildVitalResult(NlpUtil.concept(l, NegativeEnum.VITAL_INDEX)));
+            }
+        }
+        return preResults;
+    }
+
+    /** Shared builder: the two analyze() branches differed only in the concept value. */
+    private PreResult buildVitalResult(String value) {
+        PreResult result = new PreResult();
+        result.setUniqueName("体征结果--");
+        result.setDetailName("体征结果--");
+        result.setValue(value);
+        return result;
+    }
+
+    /** Vital extraction needs no special handling; delegate to the default builder. */
+    public PreResult createPreResult(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
+        return super.createDefaultPreResult(lexemes, lexeme, index);
+    }
+
+    /**
+     * Searches up to {@code max_back_search} lexemes to the left for a vital index
+     * and returns its concept, or null when none is found inside the window.
+     * Side effect: moves the inherited {@code cursor} field leftwards.
+     */
+    public String findBodyValue(LexemePath<Lexeme> lexemes, Lexeme lexeme, int index) {
+        if (cursor > 0) cursor--;
+        int searchLen = 0;
+        while (searchLen < max_back_search && cursor > -1) {
+            Lexeme leftLexeme = lexemes.get(cursor);
+            if (NlpUtil.isFeature(leftLexeme.getProperty(), nees_vital_index)) {
+                return NlpUtil.concept(leftLexeme, NegativeEnum.VITAL_INDEX);
+            }
+            searchLen++;
+            cursor--;
+        }
+        return null;
+    }
+}

+ 189 - 0
common-push/src/main/java/org/diagbot/common/push/filter/rule/PretreatmentRule.java

@@ -0,0 +1,189 @@
+package org.diagbot.common.push.filter.rule;
+
+
+import org.diagbot.common.push.bean.PreResult;
+import org.diagbot.common.push.bean.ResponseData;
+import org.diagbot.common.push.bean.Rule;
+import org.diagbot.common.push.bean.SearchData;
+import org.diagbot.common.push.cache.ApplicationCacheUtil;
+import org.diagbot.common.push.filter.pretreat.*;
+import org.diagbot.pub.Constants;
+import org.springframework.util.StringUtils;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Runs the field-specific pretreatment analyzers over a SearchData, matches the
+ * extracted values against the cached rule library, and appends rule reminders
+ * to the originating text when a rule fires.
+ */
+public class PretreatmentRule {
+    /**
+     * Entry point: dispatches each populated SearchData field to its analyzer and
+     * rewrites the field with reminders appended where rules matched.
+     *
+     * @throws java.io.IOException if an analyzer's dictionary loading fails
+     */
+    public void rule(SearchData searchData) throws java.io.IOException {
+        //患者基本信息 性别 年龄
+        if (!StringUtils.isEmpty(searchData.getNormal())) {
+            searchData.setNormal(add2PreResultList(new PretreatmentNormal(), searchData.getNormal(), "normal", searchData));
+        }
+        //症状数据
+        if (!StringUtils.isEmpty(searchData.getSymptom())) {
+            searchData.setSymptom(add2PreResultList(new PretreatmentSymptom(), searchData.getSymptom(), "symptom", searchData));
+        }
+        //体征数据
+        if (!StringUtils.isEmpty(searchData.getVital())) {
+            searchData.setVital(add2PreResultList(new PretreatmentVital(), searchData.getVital(), "vital", searchData));
+        }
+        //lis结构化信息优先;否则退回到文本解析
+        if (searchData.getLisArr() != null && searchData.getLisArr().size() > 0) {
+            searchData.setLis(add2PreResultList(searchData.getLisArr(), searchData.getLis(), "lis", searchData));
+        } else if (!StringUtils.isEmpty(searchData.getLis())) {
+            searchData.setLis(add2PreResultList(new PretreatmentLis(), searchData.getLis(), "lis", searchData));
+        }
+        //pacs数据
+        if (!StringUtils.isEmpty(searchData.getPacs())) {
+            searchData.setPacs(add2PreResultList(new PretreatmentPacs(), searchData.getPacs(), "pacs", searchData));
+        }
+        //其他史
+        if (!StringUtils.isEmpty(searchData.getOther())) {
+            searchData.setOther(add2PreResultList(new PretreatmentOther(), searchData.getOther(), "other", searchData));
+        }
+        //开具诊断
+        if (!StringUtils.isEmpty(searchData.getDiag())) {
+            add2PreResultList(new PretreatmentDiag(), searchData.getDiag(), "diag", searchData);
+        }
+        //开具化验
+        if (!StringUtils.isEmpty(searchData.getLisOrder())) {
+            add2PreResultList(new PretreatmentMakeList(), searchData.getLisOrder(), "lisOrder", searchData);
+        }
+        //开具辅检检查
+        if (!StringUtils.isEmpty(searchData.getPacsOrder())) {
+            add2PreResultList(new PretreatmentMakeList(), searchData.getPacsOrder(), "pacsOrder", searchData);
+        }
+    }
+
+    /** Runs the analyzer, then feeds its results into the rule matcher. */
+    private String add2PreResultList(Pretreatment pretreatment, String content, String ruleType, SearchData searchData) throws java.io.IOException {
+        List<PreResult> preResultList = pretreatment.analyze(content);
+        return add2PreResultList(preResultList, content, ruleType, searchData);
+    }
+
+    /** Manual smoke test; not used in production flow. */
+    public static void main(String[] args) throws IOException {
+        PretreatmentRule pretreatmentRule = new PretreatmentRule();
+        SearchData searchData = new SearchData();
+        searchData.setSymptom("钠(Na)110mmol/L");
+        pretreatmentRule.rule(searchData);
+    }
+
+    /**
+     * Matches each extracted PreResult against the cached rule library, records
+     * firing rules on the SearchData (grouped by ruleType) and appends each
+     * rule's reminder text to {@code content}.
+     *
+     * @return the content with any reminders appended
+     */
+    private String add2PreResultList(List<PreResult> preResultList, String content, String ruleType, SearchData searchData) throws java.io.IOException {
+        Map<String, List<Rule>> kl_rule_filter_map = ApplicationCacheUtil.getKl_rule_filter_map();
+        //符合条件的规则,按规则类型归类
+        Map<String, List<Rule>> accord_rule_map = searchData.getRules();
+        if (preResultList == null) {
+            return content;
+        }
+        for (PreResult result : preResultList) {
+            //规则库中匹配 — single lookup instead of the previous double get + redundant null check
+            List<Rule> rules = kl_rule_filter_map.get(result.getUniqueName());
+            if (rules == null) {
+                continue;
+            }
+            //结构化数据进来非数字类型值保存在otherValue,赋值到value中
+            boolean swapped = !StringUtils.isEmpty(result.getOtherValue());
+            if (swapped) {
+                result.setValue(result.getOtherValue());
+            }
+            for (Rule rule : rules) {
+                if (suitRule(result, rule, content)) {
+                    rule.setOriginText(result.getUniqueName() + ":" + result.getValue() + result.getUnits());
+                    List<Rule> accord_rules = accord_rule_map.get(ruleType);
+                    if (accord_rules == null) {
+                        accord_rules = new ArrayList<>();
+                    }
+                    accord_rules.add(rule);
+                    accord_rule_map.put(ruleType, accord_rules);
+                    searchData.setRules(accord_rule_map);
+
+                    content = content + (rule.getRemind() == null ? "" : rule.getRemind());
+                }
+            }
+            //还原回去
+            if (swapped) {
+                result.setValue(null);
+            }
+        }
+        return content;
+    }
+
+    /**
+     * Decides whether one extracted value satisfies one rule.
+     * Priority: exact value, then [min,max] range, then min-only, then max-only.
+     * NOTE(review): result.getUnits() must be non-null here or the unit comparison
+     * throws NPE — confirm upstream always sets units.
+     */
+    private boolean suitRule(PreResult result, Rule rule, String content) {
+        //标准值最优先匹配
+        if (org.apache.commons.lang3.StringUtils.isNotEmpty(rule.getEq_value())) {
+            return compareEqual(result.getValue(), rule.getEq_value());
+        }
+        if (org.apache.commons.lang3.StringUtils.isNotEmpty(rule.getMax_value()) && org.apache.commons.lang3.StringUtils.isNotEmpty(rule.getMin_value())) {
+            return compareMin(result.getValue(), rule.getMax_value(), rule.getMax_operator()) //比最大值小
+                    && compareMax(result.getValue(), rule.getMin_value(), rule.getMin_operator()) //比最小值大
+                    && result.getUnits().equals(rule.getMin_unit())
+                    && result.getUnits().equals(rule.getMax_unit());
+        }
+        if (org.apache.commons.lang3.StringUtils.isNotEmpty(rule.getMin_value())) {
+            return compareMin(result.getValue(), rule.getMin_value(), rule.getMin_operator())   //比最小值小
+                    && result.getUnits().equals(rule.getMin_unit());
+        }
+        if (org.apache.commons.lang3.StringUtils.isNotEmpty(rule.getMax_value())) {
+            return compareMax(result.getValue(), rule.getMax_value(), rule.getMax_operator())   //比最大值大
+                    && result.getUnits().equals(rule.getMax_unit());
+        }
+        return false;
+    }
+
+    /** Null-safe exact string equality; empty/null on either side never matches. */
+    private boolean compareEqual(String c1, String c2) {
+        return !StringUtils.isEmpty(c1) && !StringUtils.isEmpty(c2) && c1.equals(c2);
+    }
+
+    /** c1 &lt; c2 (or &lt;= when the operator contains '='); false on missing or non-numeric input. */
+    private boolean compareMin(String c1, String c2, String operator) {
+        if (!StringUtils.isEmpty(c1) && !StringUtils.isEmpty(c2) && !StringUtils.isEmpty(operator)) {
+            try {
+                if (operator.contains("=")) {
+                    return Double.parseDouble(c1) <= Double.parseDouble(c2);
+                } else {
+                    return Double.parseDouble(c1) < Double.parseDouble(c2);
+                }
+            } catch (NumberFormatException e) {
+                //非数值输入视为不匹配
+            }
+        }
+        return false;
+    }
+
+    /** c1 &gt; c2 (or &gt;= when the operator contains '='); false on missing or non-numeric input. */
+    private boolean compareMax(String c1, String c2, String operator) {
+        if (!StringUtils.isEmpty(c1) && !StringUtils.isEmpty(c2) && !StringUtils.isEmpty(operator)) {
+            try {
+                if (operator.contains("=")) {
+                    return Double.parseDouble(c1) >= Double.parseDouble(c2);
+                } else {
+                    return Double.parseDouble(c1) > Double.parseDouble(c2);
+                }
+            } catch (NumberFormatException e) {
+                //非数值输入视为不匹配
+            }
+        }
+        return false;
+    }
+
+}

+ 24 - 2
bigdata-web/src/main/java/org/diagbot/bigdata/util/BigDataConstants.java

@@ -1,4 +1,7 @@
-package org.diagbot.bigdata.util;
+package org.diagbot.common.push.util;
+
+import java.util.HashMap;
+import java.util.Map;
 
 /**
  * @ClassName org.diagbot.bigdata.util.BigDataConstants
@@ -7,7 +10,7 @@ package org.diagbot.bigdata.util;
  * @Date 2019/1/16/016 14:06
  * @Version 1.0
  **/
-public class BigDataConstants {
+public class PushConstants {
     public final static String resource_type_i = "I";       //住院
     public final static String resource_type_o = "O";       //门诊
     public final static String resource_type_e = "E";       //急诊
@@ -38,4 +41,23 @@ public class BigDataConstants {
     public final static String result_mapping_vital = "resultMappingVitalMap";          //推送体征结果名称映射
     public final static String result_mapping_diag = "resultMappingDiagMap";          //推送疾病科室名称映射
     public final static String result_mapping_filter = "resultMappingFilterMap";          //推送结果年龄 性别过滤
+
+    //关系抽取property_id对应property_name
+    public final static Map<String,String> featureTypeMap = new HashMap<String,String>(){{
+        put("80","辅检其他");
+        put("9","单位");
+        put("2","时间");
+        put("3","部位");
+        put("7","反意或虚拟");
+        put("16","辅检项目");
+        put("17","辅检结果");
+        put("81","属性");
+        put("82","方位");
+        put("83","形容词");
+        put("84","局部结构");
+        put("85","属性值");
+        put("86","表现");
+        put("28","字母与数值");
+        put("87","正常表现");
+    }};
 }

+ 258 - 0
common-push/src/main/java/org/diagbot/common/push/work/ParamsDataProxy.java

@@ -0,0 +1,258 @@
+package org.diagbot.common.push.work;
+
+import org.algorithm.core.cnn.AlgorithmCNNExecutor;
+import org.algorithm.core.cnn.AlgorithmCNNExecutorPacs;
+import org.algorithm.factory.RelationExtractionFactory;
+import org.apache.commons.lang3.StringUtils;
+import org.diagbot.common.push.bean.SearchData;
+import org.diagbot.common.push.util.PushConstants;
+import org.diagbot.nlp.feature.FeatureAnalyze;
+import org.diagbot.nlp.feature.FeatureType;
+import org.diagbot.nlp.util.Constants;
+import org.diagbot.nlp.util.NegativeEnum;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.*;
+
+/**
+ * @ClassName org.diagbot.bigdata.work.ParamsDataProxy
+ * @Description TODO
+ * @Author fyeman
+ * @Date 2019/1/16/016 14:04
+ * @Version 1.0
+ **/
+public class ParamsDataProxy {
+    Logger logger = LoggerFactory.getLogger(ParamsDataProxy.class);
+
+    /**
+     * Fills the free-text "normal" field from age/sex and normalizes the sex code
+     * (M -&gt; 1, F -&gt; 2, anything else -&gt; 3).
+     */
+    public void createNormalInfo(SearchData searchData) throws Exception {
+        //计算年龄区间
+        if (searchData.getAge() > 0) {
+            searchData.setAge_start(searchData.getAge() - 5);
+            searchData.setAge_end(searchData.getAge() + 5);
+
+            searchData.setNormal("年龄" + searchData.getAge() + "岁");
+        }
+        //修改性别代码; guard against a null normal so we never emit "null性别..."
+        String normal = searchData.getNormal() == null ? "" : searchData.getNormal();
+        if (!StringUtils.isEmpty(searchData.getSex())) {
+            if ("M".equals(searchData.getSex())) {
+                searchData.setSex("1");
+                searchData.setNormal(normal + "性别男性");
+            } else if ("F".equals(searchData.getSex())) {
+                searchData.setSex("2");
+                searchData.setNormal(normal + "性别女性");
+            } else {
+                searchData.setSex("3");
+                searchData.setNormal(normal + "性别其他");
+            }
+        } else {
+            searchData.setSex("3");
+        }
+    }
+
+    /**
+     * Normalizes the request, converts external feature types, extracts features
+     * from every populated text field into searchData's inputs/filters, and runs
+     * the pacs relation-extraction model.
+     */
+    public void createSearchData(SearchData searchData) throws Exception {
+        //消除空格
+        if (searchData.getSymptom() != null) {
+            searchData.setSymptom(searchData.getSymptom().trim());
+        }
+        if (searchData.getDiag() != null) {
+            searchData.setDiag(searchData.getDiag().trim());
+        }
+        //默认查询门诊数据
+        if (StringUtils.isEmpty(searchData.getResourceType())) {
+            searchData.setResourceType(PushConstants.resource_type_o);
+        }
+        //一次推送多个类别信息
+        String[] featureTypes = searchData.getFeatureType().split(",");
+        //featureType统一转换
+        String[] convertFeatureTypes = new String[featureTypes.length];
+        for (int i = 0; i < featureTypes.length; i++) {
+            convertFeatureTypes[i] = convertFeatureType(searchData.getSysCode(), featureTypes[i]);
+        }
+        searchData.setFeatureType(StringUtils.join(convertFeatureTypes, ","));
+        searchData.setFeatureTypes(convertFeatureTypes);
+
+        //获取入参中的特征信息
+        FeatureAnalyze fa = new FeatureAnalyze();
+        List<Map<String, Object>> featuresList = new ArrayList<>();
+        if (!StringUtils.isEmpty(searchData.getSymptom())) {
+            //提取现病史
+            featuresList = fa.start(searchData.getSymptom(), FeatureType.FEATURE);
+            paramFeatureInit(searchData, featuresList);
+            //提取时间信息
+            featuresList = fa.start(searchData.getSymptom(), FeatureType.TIME);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getVital())) {
+            //提取体征
+            featuresList = fa.start(searchData.getVital(), FeatureType.FEATURE);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getPast())) {
+            //提取既往史
+            featuresList = fa.start(searchData.getPast(), FeatureType.FEATURE);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getOther()) || !StringUtils.isEmpty(searchData.getIndications())) {
+            //提取其他史等
+            featuresList = fa.start((searchData.getOther() == null ? "" : searchData.getOther()) + (searchData.getIndications() == null ? "" : searchData.getIndications()), FeatureType.FEATURE);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getPacs())) {
+            featuresList = fa.start(searchData.getPacs(), FeatureType.PACS);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getLis())) {
+            featuresList = fa.start(searchData.getLis(), FeatureType.LIS);
+            paramFeatureInit(searchData, featuresList);
+        }
+        // 清洗特征词,去除词性不匹配的词
+        searchData = cleanFeature(featuresList, fa, searchData);
+        if (!StringUtils.isEmpty(searchData.getOther())) {
+            //如果既往史中诊断信息,需要提取这个特征
+            featuresList = fa.start(searchData.getOther(), FeatureType.DIAG);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getDiag()) && StringUtils.isEmpty(searchData.getSymptom())) {
+            featuresList = fa.start(searchData.getDiag(), FeatureType.DIAG);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getPacs())) {
+            //关系抽取模型
+            AlgorithmCNNExecutorPacs algorithmCNNExecutor = RelationExtractionFactory.getInstance();
+            RelationExtractionUtil re = new RelationExtractionUtil();
+            //Pacs原始分词结果
+            List<List<String>> execute = algorithmCNNExecutor.execute(searchData.getPacs(), re.createTriad(searchData));
+            if (execute != null && execute.size() > 0) {
+                re.addToSearchDataInputs(execute, searchData);
+            }
+        }
+    }
+
+    /**
+     * 外部系统featureType需要转化为大数据定义的featureType
+     *
+     * @param sysCode     external system code; empty or "1" means "needs mapping"
+     * @param featureType external feature-type code
+     * @return the mapped internal type, the input unchanged for other sysCodes,
+     *         or null when no mapping exists
+     */
+    private String convertFeatureType(String sysCode, String featureType) {
+        if (StringUtils.isEmpty(sysCode) || sysCode.equals("1")) {
+            if (featureType == null) {
+                return null;
+            }
+            switch (featureType) {
+                case "1":  return PushConstants.feature_type_symptom;
+                case "7":  return PushConstants.feature_type_diag;
+                case "4":  return PushConstants.feature_type_vital;
+                case "5":  return PushConstants.feature_type_lis;
+                case "6":  return PushConstants.feature_type_pacs;
+                case "3":  return PushConstants.feature_type_history;
+                case "8":  return PushConstants.feature_type_treat;
+                case "22": return PushConstants.feature_type_labelpush;
+                case "11": return PushConstants.feature_type_manju;
+                case "42": return PushConstants.feature_type_vital_index;
+                default:   return null;
+            }
+        }
+        return featureType;
+    }
+
+    /**
+     * Removes cross-typed feature words: pacs-typed words found in the lis text
+     * and lis-typed words found in the pacs text.
+     * NOTE(review): the lis text is scanned with PACS features and vice versa —
+     * looks intentional (cleaning foreign words out of each field) but confirm.
+     */
+    private SearchData cleanFeature(List<Map<String, Object>> featuresList, FeatureAnalyze fa,
+                                    SearchData searchData) {
+        // 在输入的辅检文本中,只提取辅检信息
+        String[] PACS_Feature = { Constants.word_property_PACS,
+                Constants.word_property_PACS_Detail, Constants.word_property_PACS_Result };
+        searchData = removeFeature(searchData.getLis(), fa, searchData, PACS_Feature, FeatureType.PACS);
+
+        // 在输入的化验文本中,只提取化验信息
+        String[] LIS_Feature = { Constants.word_property_LIS,
+                Constants.word_property_LIS_Detail, Constants.word_property_LIS_Result };
+        searchData = removeFeature(searchData.getPacs(), fa, searchData, LIS_Feature, FeatureType.LIS);
+
+        return searchData;
+    }
+
+    /**
+     * Removes from searchData.inputs every feature extracted from {@code text}
+     * whose word property appears in {@code properties}. Extraction failure is
+     * non-fatal: the inputs collected so far are kept unchanged.
+     */
+    private SearchData removeFeature(String text, FeatureAnalyze fa,
+                                     SearchData searchData, String[] properties, FeatureType featureType) {
+        List<String> propertyList = Arrays.asList(properties);
+        try {
+            List<Map<String, Object>> featureList = fa.start(text, featureType);
+            if (featureList != null) {
+                for (Map<String, Object> item : featureList) {
+                    String name = item.get("feature_name").toString();
+                    String[] property = item.get("property").toString().split(",");
+                    for (String prop : property) {
+                        if (propertyList.contains(prop)) {
+                            searchData.getInputs().remove(name);
+                            break;
+                        }
+                    }
+                }
+            }
+        } catch (Exception ex) {
+            ex.printStackTrace();
+        }
+        // previously "return searchData" lived inside finally, silently swallowing
+        // even Errors; a plain return after the catch preserves the intended behavior
+        return searchData;
+    }
+
+    /**
+     * 推送模型入参: copies each extracted feature map into searchData's
+     * inputs/graphInputs (positive features) or filters (negative features).
+     *
+     * @param searchData   target container
+     * @param featuresList features produced by FeatureAnalyze
+     * @throws Exception propagated from downstream processing
+     */
+    private void paramFeatureInit(SearchData searchData, List<Map<String, Object>> featuresList) throws Exception {
+        if (featuresList != null && featuresList.size() > 0) {
+            Map<String, Object> featureMap = null;
+            for (int i = 0; i < featuresList.size(); i++) {
+                featureMap = featuresList.get(i);
+                Map<String, String> map = new HashMap<>();
+                for (Map.Entry<String, Object> entry : featureMap.entrySet()) {
+                    map.put(entry.getKey(), String.valueOf(entry.getValue()));
+                }
+                map.put("featureType", String.valueOf(featureMap.get("feature_type")));
+                map.put("featureName", String.valueOf(featureMap.get("feature_name")));
+                map.put("property", String.valueOf(featureMap.get("property")));
+                map.put("concept", String.valueOf(featureMap.get("concept")));
+                if (Constants.default_negative.equals(featureMap.get("negative"))) {
+                    if (map.get("featureType").equals(Constants.feature_type_time)) {
+                        //时间特征暂不入模型 (deliberately disabled)
+                    } else {
+                        if (searchData.getInputs().get(map.get("feature_name")) == null) {
+                            // model inputs are capped at the first 8 features; graph gets all
+                            if (i < 8) {
+                                searchData.getInputs().put(map.get("feature_name"), map);
+                            }
+                            searchData.getGraphInputs().put(map.get("feature_name"), map);
+                        }
+                    }
+                } else {
+                    searchData.getFilters().put(map.get("feature_name"), map);
+                }
+            }
+        }
+    }
+}

+ 92 - 0
common-push/src/main/java/org/diagbot/common/push/work/RelationExtractionUtil.java

@@ -0,0 +1,92 @@
+package org.diagbot.common.push.work;
+
+import org.algorithm.core.cnn.entity.Lemma;
+import org.algorithm.core.cnn.entity.Triad;
+import org.diagbot.common.push.bean.SearchData;
+import org.diagbot.common.push.cache.ApplicationCacheUtil;
+import org.diagbot.common.push.util.PushConstants;
+import org.diagbot.nlp.participle.ParticipleUtil;
+import org.diagbot.nlp.participle.word.Lexeme;
+import org.diagbot.nlp.participle.word.LexemePath;
+import org.diagbot.nlp.util.Constants;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @Description:
+ * @Author: HUJING
+ * @Date: 2019/9/9 17:30
+ */
+public class RelationExtractionUtil {
+    /**
+     * Splits the pacs text into sentences, turns every medical lexeme into a
+     * Lemma (text, "start,end" position, mapped property name) and returns all
+     * ordered pairwise combinations as Triads for the relation-extraction model.
+     *
+     * @throws IOException if the pacs participle dictionary fails to load
+     */
+    public List<Triad> createTriad(SearchData searchData) throws IOException {
+        List<Triad> triads = new ArrayList<>();
+        String[] pacsSplits = searchData.getPacs().trim().split("。|\n");
+        List<Lemma> lemmaList = new ArrayList<>();
+        for (String pacsSplit : pacsSplits) {
+            LexemePath<Lexeme> pacsLexemes = ParticipleUtil.participlePacs(pacsSplit);
+            for (int i = 0; i < pacsLexemes.size(); i++) {
+                Lexeme lexeme = pacsLexemes.get(i);
+                //跳过非医学词
+                if (Constants.word_property_other.equals(lexeme.getProperty())) {
+                    continue;
+                }
+                Lemma lemma = new Lemma();
+                lemma.setText(lexeme.getText());
+                // position = "start,end" with an inclusive end index
+                lemma.setPosition(lexeme.getOffset() + "," + (lexeme.getOffset() + lexeme.getLength() - 1));
+                lemma.setProperty(PushConstants.featureTypeMap.get(lexeme.getProperty()));
+                lemmaList.add(lemma);
+            }
+        }
+        //所有词两两组合
+        for (int i = 0; i < lemmaList.size() - 1; i++) {
+            for (int j = i + 1; j < lemmaList.size(); j++) {
+                Triad triad = new Triad();
+                triad.setL_1(lemmaList.get(i));
+                triad.setL_2(lemmaList.get(j));
+                triads.add(triad);
+            }
+        }
+        return triads;
+    }
+
+    /**
+     * Joins each relation-extraction output into one string and, when it matches
+     * a known diagnosis-detail entry, registers it as a positive pacs-result
+     * feature in searchData's inputs (first occurrence wins).
+     */
+    public void addToSearchDataInputs(List<List<String>> relationExtractionContents, SearchData searchData) throws Exception {
+        for (List<String> contents : relationExtractionContents) {
+            // local, single-threaded concatenation: StringBuilder suffices
+            StringBuilder sb = new StringBuilder();
+            for (String content : contents) {
+                sb.append(content);
+            }
+            String joined = sb.toString();
+            if (!isExist(joined)) {
+                continue;
+            }
+            Map<String, String> map = new HashMap<>();
+            map.put("featureType", "5");
+            map.put("featureName", joined);
+            map.put("property", "17");
+            map.put("concept", joined);
+            //全是有
+            map.put("negative", Constants.default_negative);
+            if (searchData.getInputs().get(joined) == null) {
+                searchData.getInputs().put(joined, map);
+            }
+        }
+    }
+
+    /**
+     * 关系抽取输出的content是否在已有诊断依据中存在
+     * NOTE(review): key "4" presumably selects the pacs-evidence bucket of the
+     * diagnose-detail cache — confirm against ApplicationCacheUtil.
+     * @param content candidate evidence text
+     * @return true when the cached diagnosis-detail map contains it
+     */
+    public boolean isExist(String content) {
+        Map<String, Map<String, String>> kl_diagnose_detail_filter_map = ApplicationCacheUtil.getKl_diagnose_detail_filter_map();
+        Map<String, String> pacsDetail = kl_diagnose_detail_filter_map.get("4");
+        return pacsDetail != null && pacsDetail.containsKey(content);
+    }
+
+}

+ 5 - 0
graph-web/pom.xml

@@ -35,6 +35,11 @@
 			<artifactId>common-service</artifactId>
 			<version>1.0.0</version>
 		</dependency>
+		<dependency>
+			<groupId>org.diagbot</groupId>
+			<artifactId>common-push</artifactId>
+			<version>1.0.0</version>
+		</dependency>
 		<dependency>
 			<groupId>org.diagbot</groupId>
 			<artifactId>nlp</artifactId>

+ 2 - 10
graph-web/src/main/java/org/diagbot/graphWeb/controller/GraphController.java

@@ -1,18 +1,13 @@
 package org.diagbot.graphWeb.controller;
 
-import com.alibaba.fastjson.JSONObject;
+import org.diagbot.common.push.bean.ResponseData;
+import org.diagbot.common.push.bean.SearchData;
 import org.diagbot.graph.javabean.GdbResponse;
-import org.diagbot.graph.jdbc.DriverManager;
-import org.diagbot.graph.jdbc.Neo4jAPI;
-import org.diagbot.graphWeb.dao.BackResponse;
 import org.diagbot.graphWeb.work.DiseaseCalculate;
 import org.diagbot.graphWeb.work.GraphCalculate;
 import org.diagbot.graphWeb.work.HighRiskCalculate;
-import org.diagbot.common.work.ResponseData;
-import org.diagbot.common.work.SearchData;
 import org.diagbot.graphWeb.work.LisPacsCalculate;
 import org.diagbot.pub.api.Response;
-import org.diagbot.pub.utils.PropertiesUtil;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.RequestBody;
 import org.springframework.web.bind.annotation.RequestMapping;
@@ -20,9 +15,6 @@ import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.ResponseBody;
 
 import javax.servlet.http.HttpServletRequest;
-import java.util.Map;
-
-import static org.diagbot.graph.jdbc.DriverManager.propertiesUtil;
 
 @Controller
 @RequestMapping("/graph")

+ 4 - 8
graph-web/src/main/java/org/diagbot/graphWeb/work/DiseaseCalculate.java

@@ -1,18 +1,14 @@
 package org.diagbot.graphWeb.work;
 
-import com.alibaba.fastjson.JSONObject;
-import org.diagbot.common.javabean.*;
-import org.diagbot.common.work.FeatureRate;
-import org.diagbot.common.work.ResponseData;
-import org.diagbot.common.work.SearchData;
-import org.diagbot.nlp.relation.module.Lis;
-import org.diagbot.pub.api.Response;
+import org.diagbot.common.push.bean.FeatureRate;
+import org.diagbot.common.push.bean.ResponseData;
+import org.diagbot.common.push.bean.SearchData;
+import org.diagbot.common.push.bean.neo4j.*;
 import org.diagbot.pub.utils.PropertiesUtil;
 import org.diagbot.pub.utils.http.HttpApi;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.servlet.http.HttpServletRequest;
 import java.util.*;
 
 /**

+ 3 - 4
graph-web/src/main/java/org/diagbot/graphWeb/work/FilterSortDiag.java

@@ -1,10 +1,9 @@
 package org.diagbot.graphWeb.work;
 
 import org.apache.commons.lang3.StringUtils;
-import org.diagbot.common.work.FeatureRate;
-import org.diagbot.common.work.ResponseData;
-import org.diagbot.graph.jdbc.Neo4jAPI;
-import org.diagbot.graph.util.CacheUtil;
+import org.diagbot.common.push.bean.FeatureRate;
+import org.diagbot.common.push.bean.ResponseData;
+import org.diagbot.common.push.cache.CacheUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

+ 91 - 293
graph-web/src/main/java/org/diagbot/graphWeb/work/GraphCalculate.java

@@ -4,21 +4,21 @@ import com.alibaba.fastjson.JSON;
 import com.alibaba.fastjson.JSONArray;
 import com.alibaba.fastjson.JSONObject;
 import org.apache.commons.lang3.StringUtils;
-import org.diagbot.common.javabean.MangementEvaluation;
-import org.diagbot.common.work.FeatureRate;
-import org.diagbot.common.javabean.Filnlly;
-import org.diagbot.common.work.LisDetail;
-import org.diagbot.common.work.ResponseData;
-import org.diagbot.common.work.SearchData;
+import org.diagbot.common.push.bean.FeatureRate;
+import org.diagbot.common.push.bean.PreResult;
+import org.diagbot.common.push.bean.ResponseData;
+import org.diagbot.common.push.bean.SearchData;
+import org.diagbot.common.push.bean.neo4j.Filnlly;
+import org.diagbot.common.push.bean.neo4j.MangementEvaluation;
+import org.diagbot.common.push.bean.neo4j.MedicalIndication;
+import org.diagbot.common.push.bean.neo4j.MedicalIndicationDetail;
+import org.diagbot.common.push.cache.CacheUtil;
 import org.diagbot.graph.jdbc.DriverManager;
 import org.diagbot.graph.jdbc.Neo4jAPI;
 
 import javax.servlet.http.HttpServletRequest;
 import java.util.*;
 
-import org.diagbot.common.javabean.MedicalIndication;
-import org.diagbot.common.javabean.MedicalIndicationDetail;
-import org.diagbot.graph.util.CacheUtil;
 import org.diagbot.graphWeb.util.MapValueComparator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -39,16 +39,14 @@ public class GraphCalculate {
 
         long starttime = System.currentTimeMillis();
         System.out.println("Start at: " + starttime);
-
         ResponseData responseData = new ResponseData();
 //        ParamsDataProxy paramsDataProxy = new ParamsDataProxy();
 //        paramsDataProxy.createSearchData(request, searchData);
         List<String> inputList = new ArrayList<>();
         int age = searchData.getAge();
         String sex = searchData.getSex();
+        String diseaseName = searchData.getDiseaseName();
         logger.info("前端传来的年龄为 :"+age+" 前端传来的性别为 :"+sex);
-        //获取缓存
-//        Map<String, String> lexionCache = CacheUtil.getLexionCache();
         Map<String, Map<String, String>> sexAgeCache = CacheUtil.getSexAgeCache();
         Map<String, Map<String, String>> inputs = searchData.getGraphInputs();
         Set<String> ss = new HashSet<>();
@@ -62,9 +60,14 @@ public class GraphCalculate {
                 }
             }
         }
+        Map<String, Map<String, String>> inputsMap = searchData.getInputs();
+        if(inputsMap != null && inputsMap.size()>0){
+            if(inputsMap.keySet()!=null && inputsMap.keySet().size()>0){
+                ss.addAll(inputsMap.keySet()) ;
+            }
+        }
         logger.info("从分词系统接收到的词 :" + ss);
         System.out.println("Participle takes: " + (System.currentTimeMillis()-starttime)/1000d + 's');
-
         List<String> featureTypeList = Arrays.asList(searchData.getFeatureTypes());
         logger.info("featureTypeList : " + featureTypeList);
         inputList.addAll(ss);
@@ -74,39 +77,38 @@ public class GraphCalculate {
         logger.info("图谱开始推送诊断!!!!!!!!!!!");
         String webDiag = searchData.getDiag();
         logger.info("页面诊断为 :"+webDiag);
+        String[] webDiagList = webDiag.split(",|,|、|;|:|;");
         //计算诊断
-        Map<String, Map<String,String>> condition =null;
+        Map<String, Object> condition =null;
         Map<String, Map<String, String>> excludelist = null;
         if(featureTypeList.contains("2")){
             condition = neo4jAPI.getNewCondition((String[]) inputList.toArray(new String[inputList.size()]),webDiag );
-
             // 查找需要排除的诊断
             excludelist = neo4jAPI.getExcludeDiag(inputList);
             responseData.setExcludeDiag(Arrays.asList(excludelist.keySet().stream().toArray(String[]::new)));
         }
-
         List<FeatureRate> featureRates = new ArrayList<>();
         if(condition != null){
-            for (Map.Entry<String, Map<String,String>> d : condition.entrySet()) {
-                String dis = d.getKey();
-                Map<String, String> sexAgeMap = sexAgeCache.get(dis);
-                if(sexAgeMap != null){
-                    String sexType = sexAgeMap.get("sexType");
-                    Integer min_age = Integer.parseInt(sexAgeMap.get("min_age"));
-                    Integer max_age = Integer.parseInt(sexAgeMap.get("max_age"));
-
-                    if(("1".equals(sexType) &&sex.equals(sexType)) || ("2".equals(sexType) &&sex.equals(sexType)) || "3".equals(sexType)){
-                        if(min_age <age && age<=max_age){
-                            FeatureRate featureRate = new FeatureRate();
-                            featureRate.setFeatureName(dis);
-                            Map<String, String> value = d.getValue();
-                            String s = JSON.toJSONString(value);
-                            featureRate.setDesc(s);
-//                            featureRate.setRate("neo4j");
-                            featureRate.setSource("neo4j");
-                            featureRates.add(featureRate);
+            Map<String, Map<String,String>> allCondition = (Map<String, Map<String,String>>)condition.get("全部诊断");
+            if(allCondition != null && allCondition.size()>0){
+                for (Map.Entry<String, Map<String,String>> d : allCondition.entrySet()) {
+                    String dis = d.getKey();
+                    Map<String, String> sexAgeMap = sexAgeCache.get(dis);
+                    if(sexAgeMap != null){
+                        String sexType = sexAgeMap.get("sexType");
+                        Integer min_age = Integer.parseInt(sexAgeMap.get("min_age"));
+                        Integer max_age = Integer.parseInt(sexAgeMap.get("max_age"));
+                        if(("1".equals(sexType) &&sex.equals(sexType)) || ("2".equals(sexType) &&sex.equals(sexType)) || "3".equals(sexType)){
+                            if(min_age <age && age<=max_age){
+                                FeatureRate featureRate = new FeatureRate();
+                                featureRate.setFeatureName(dis);
+                                Map<String, String> value = d.getValue();
+                                String s = JSON.toJSONString(value);
+                                featureRate.setDesc(s);
+                                featureRate.setSource("neo4j");
+                                featureRates.add(featureRate);
+                            }
                         }
-
                     }
                 }
             }
@@ -117,298 +119,94 @@ public class GraphCalculate {
             logger.info("diseaseSet :" + diseaseSet);
         }
         Integer diseaseType = searchData.getDisType();
-        List<LisDetail> lisArr = searchData.getLisArr();
+        List<PreResult> lisArr = searchData.getLisArr();
         Set<String> lisSet = new HashSet<>();
         Map<String,Double> lis_Result = new HashMap<>();
         if(lisArr != null && lisArr.size() > 0){
-            for (LisDetail lis:lisArr) {
+            for (PreResult lis:lisArr) {
                 String detailName = lis.getUniqueName();
                 logger.info("公表名为  :"+detailName);
-                Double value = lis.getValue();
-                lis_Result.put(detailName,value);
-                lisSet.add(detailName);
+                String lisValue = lis.getValue();
+                if(StringUtils.isNotEmpty(lisValue)){
+                    Double value = Double.valueOf(lisValue);
+                    lis_Result.put(detailName,value);
+                    lisSet.add(detailName);
+                }
             }
         }
         logger.info("页面导入的所有化验项为 :" +lisSet);
-
         //走治疗
-        if (webDiag !=null && webDiag.trim() != null && webDiag.trim() != "" && featureTypeList.contains("6")) {
+        if (StringUtils.isNotEmpty(diseaseName) && featureTypeList.contains("6")) {
             // 查找页面诊断里是否有不良反应
-            String[] webDiagList = webDiag.split(",|,|、|;|:|;");
-            Map<String, List<String>> disUE = neo4jAPI.getDisUE(webDiagList, diseaseType);
+            Map<String, List<String>> disUE = neo4jAPI.getDisUE(diseaseName, diseaseType);
             //根据页面输入内容推出的不良反应集合
             Set<String> ue = neo4jAPI.getUe((String[]) inputList.toArray(new String[inputList.size()]));
             //走平常诊断治疗
-            Map<String, Filnlly> mulDiseaseTreat = neo4jAPI.getMulDiseaseTreat_2(webDiag, diseaseType, diseaseSet,disUE,ue,String.join(",", inputList));
+            Map<String, Filnlly> mulDiseaseTreat = neo4jAPI.getMulDiseaseTreat_2(diseaseName,webDiag, diseaseType, diseaseSet,disUE,ue,String.join(",", inputList));
             responseData.setTreat(mulDiseaseTreat);
         }
         //管理评估(慢病才有)
         if (featureTypeList.contains("11") && diseaseType == 1 && diseaseType != null) {
             logger.info("featureTypeList 包含11,走管理评估!!!");
             if(webDiag != null){
-                String[] webDiagsplits = webDiag.split(",|,|、|;|:|;");
-                MangementEvaluation mangementEvaluation = neo4jAPI.pushMe(webDiagsplits,lis_Result);
+                MangementEvaluation mangementEvaluation = neo4jAPI.pushMe(webDiagList,lis_Result);
                 Map<String, JSONObject> mangementEvaluation1 = mangementEvaluation.getMangementEvaluation();
                 responseData.setManagementEvaluation(mangementEvaluation1);
             }
         }
-
         //指标推送
         if (featureTypeList.contains("22") ) {
+            List<MedicalIndication> idns =new ArrayList<>();
+            Set<String> newindSet = new HashSet<>();
+            Set<String> newindSet1 = new HashSet<>();
             //查找指标
-            Set<String> indSet = neo4jAPI.getInd((String[]) inputList.toArray(new String[inputList.size()]));
-            logger.info("featureTypeList 包含22,走指标推送!!!,图谱推出的指标为:" + indSet);
-            List<MedicalIndication> idn = neo4jAPI.getIdn(indSet, age, sex);
-            responseData.setMedicalIndications(idn);
-
-        }
-
-        //诊断推送
-        responseData.setDis(featureRates);
-        responseData.setInputs(searchData.getInputs());
-
-        System.out.println("Total takes: " + (System.currentTimeMillis()-starttime)/1000d + 's');
-        return responseData;
-    }
-//    诊断过滤
-    public void filterDis(List<FeatureRate> graphFeatureRates,String sex,Integer age) throws Exception {
-        if(neo4jAPI == null){
-            neo4jAPI = new Neo4jAPI(DriverManager.newDrive());
-        }
-        List<String> disList = new ArrayList<>();
-        if(graphFeatureRates != null && graphFeatureRates.size()>0){
-            for (FeatureRate f:graphFeatureRates) {
-                disList.add("\""+f.getFeatureName()+"\"");
-            }
-        }
-//        第一步先过滤性别和年龄
-        Set<String> filterSexAgeList = neo4jAPI.filterDisFromSexAge(disList,sex,age);
-
-
-    }
-    /**
-     * 返回LIS,PACS
-     *
-     * @param searchData
-     */
-    public ResponseData getLisPacs(HttpServletRequest request, SearchData searchData) throws Exception {
-        ResponseData responseData = new ResponseData();
-        Neo4jAPI neo4jAPI = new Neo4jAPI(DriverManager.newDrive());
-        String webDiag = searchData.getDiag();
-        List<String> webDiagList = Arrays.asList(webDiag.split(",|,|、"));
-        List<FeatureRate> bigdataDiagFeature = searchData.getPushDiags();
-        List<String> bigdataDiagList = new LinkedList<>();
-        if (bigdataDiagFeature.size() > 0) {
-            for (FeatureRate fe : bigdataDiagFeature) {
-                if ("neo4j".equals(fe.getRate())) {
-                    bigdataDiagList.add(fe.getFeatureName());
-                    logger.info("图谱推出的诊断为: " + fe.getFeatureName());
-                } else {
-                    bigdataDiagList.add(fe.getFeatureName());
-                    logger.info("大数据推出的诊断为: " + fe.getFeatureName());
-                }
-            }
-        }
-        for (String web : webDiagList) {
-            for (int i = 0; i < bigdataDiagList.size(); i++) {
-                if (bigdataDiagList.get(i).equals(web)) {
-                    bigdataDiagList.remove(bigdataDiagList.get(i));
+            Set<String> indSet = neo4jAPI.getInd((String[]) inputList.toArray(new String[inputList.size()]),sex,age);
+            if(indSet != null && indSet.size()>0){
+                for (String ind:indSet) {
+                    if("肾功能不全".equals(ind)){
+                        newindSet.add(ind);
+                    }else {
+                        newindSet1.add(ind);
+                    }
                 }
             }
-        }
-        logger.info("界面诊断为: " + webDiagList);
-        logger.info("推出的诊断合并为: " + bigdataDiagList);
-        Map<String, Set<String>> weblisPacs1 = null;
-        Map<String, Set<String>> biglisPacs1 = null;
-        if (webDiagList != null && webDiagList.size() > 0) {
-            weblisPacs1 = neo4jAPI.getLisPacs(webDiagList);//界面诊断推出的LIS,PACS
-        }
-        if (bigdataDiagList != null && bigdataDiagList.size() > 0) {
-            biglisPacs1 = neo4jAPI.getLisPacs(bigdataDiagList);//大数据推得诊断
-        }
-        Set<String> lis = null;
-        Set<String> pacs = null;
-        //如果界面有诊断
-        if (weblisPacs1 != null && weblisPacs1.values().size() > 0) {
-            lis = weblisPacs1.get("LIS");
-            pacs = weblisPacs1.get("PACS");
-            logger.info("界面有诊断的情况下,界面诊断推出的lis为: " + lis);
-            logger.info("界面有诊断的情况下,界面诊断推出的pacs为: " + pacs);
-            if (biglisPacs1 != null && biglisPacs1.values().size() > 0) {
-                Set<String> bl = biglisPacs1.get("LIS");
-                Set<String> bp = biglisPacs1.get("PACS");
-                logger.info("界面有诊断的情况下,推出诊断的lis为: " + bl);
-                logger.info("界面有诊断的情况下,推出诊断的pacs为: " + bp);
-                lis.addAll(bl);
-                pacs.addAll(bp);
-            }
-        } else {
-            lis = biglisPacs1.get("LIS");
-            pacs = biglisPacs1.get("PACS");
-            logger.info("界面无诊断的情况下,推出诊断的lis为: " + lis);
-            logger.info("界面无诊断的情况下,推出诊断的lis为: " + pacs);
-        }
-        logger.info("推出的合并lis为: " + lis);
-        logger.info("推出的合并pacs为: " + pacs);
-        ArrayList<FeatureRate> lisFeature = new ArrayList<>();
-        ArrayList<FeatureRate> pacsFeature = new ArrayList<>();
-        for (String l : lis) {
-            FeatureRate featureRate = new FeatureRate();
-            featureRate.setFeatureName(l);
-            lisFeature.add(featureRate);
-        }
-        for (String p : pacs) {
-            FeatureRate featureRate = new FeatureRate();
-            featureRate.setFeatureName(p);
-            pacsFeature.add(featureRate);
-        }
-        responseData.setLabs(lisFeature);
-        responseData.setPacs(pacsFeature);
-        return responseData;
-    }
-
+            Map<String, String> indLiang =null;
+            if(newindSet1 != null && newindSet1.size()>0){
+                indLiang =neo4jAPI.getIndLiang(newindSet1);
 
-    /**
-     * 对化验和检查的结果进行排序
-     *
-     * @param set
-     * @return List<FeatureRate>
-     */
-    public Set<String> processResult(Set<String> set) {
-        Set<String> frlist = new LinkedHashSet<>();
-        Map<String, String> sortval = new HashMap<>();
-        Map<String, String> items = new HashMap<>();
-        String name;
-        try {
-            if (set != null && set.size() > 0) {
-                for (String item : set) {
-                    if (sortval.get(item) == null) {
-                        sortval.put(item, "1");
-                    } else {
-                        sortval.put(item, String.valueOf(Integer.parseInt(sortval.get(item)) + 1));
-                    }
-                }
-                sortval = sortMapByValue(sortval);
-                for (String key : sortval.keySet()) {
-                    frlist.add(items.get(key));
+            }
+            logger.info("featureTypeList 包含22,走指标推送!!!,图谱推出的指标为:" + indSet);
+            List<MedicalIndication> idn =null;
+            if(newindSet.contains("肾功能不全")){
+                idn = neo4jAPI.getIdn(newindSet, age, sex);
+                if(idn!= null && idn.size()>0){
+                    idns.addAll(idn);
                 }
             }
-        } catch (Exception ex) {
-            ex.printStackTrace();
-        } finally {
-            return frlist;
-        }
-    }
-
-
-    /**
-     * 使用 Map按value进行排序
-     *
-     * @param oriMap
-     * @return
-     */
-    public static Map<String, String> sortMapByValue(Map<String, String> oriMap) {
-        if (oriMap == null || oriMap.isEmpty()) {
-            return null;
-        }
-        Map<String, String> sortedMap = new LinkedHashMap<String, String>();
-        List<Map.Entry<String, String>> entryList = new ArrayList<Map.Entry<String, String>>(
-                oriMap.entrySet());
-        Collections.sort(entryList, new MapValueComparator());
-        Iterator<Map.Entry<String, String>> iter = entryList.iterator();
-        Map.Entry<String, String> tmpEntry = null;
-        while (iter.hasNext()) {
-            tmpEntry = iter.next();
-            sortedMap.put(tmpEntry.getKey(), tmpEntry.getValue());
-        }
-        return sortedMap;
-    }
-
-
-    public Map<String, Object> scaleCalcMethod(MedicalIndicationDetail medicalIndicationDetail) throws Exception {
-        Map<String, Object> scaleCalcResult = new HashMap<>();
-        Integer type = medicalIndicationDetail.getType();
-        if (type == 2) {
-            JSONObject content = medicalIndicationDetail.getContent();
-            JSONArray contentDetails = content.getJSONArray("details");
-            if ("肾小球滤过率".equals(content.get("name"))) {
-                int age = 0;
-                double scr = 0.00;
-                float k = 0.0f;
-                double a = 0.00;
-                double denger = 0.00;
-                for (int i = 0; i < contentDetails.size(); i++) {
-                    JSONObject detailSub = contentDetails.getJSONObject(i);
-                    if ("年龄".equals(detailSub.getString("name"))) {
-                        if ("".equals(detailSub.getString("value"))) {
-                            //如果拿到的年龄为空,
-                            break;
-                        } else {
-                            age = Integer.parseInt(detailSub.getString("value"));
-                        }
-                    } else if ("血肌酐".equals(detailSub.getString("name"))) {
-                        if ("".equals(detailSub.getString("value"))) {
-                            //如果给的value是空,给的2.2621是假数据
-                            break;
-                        } else {
-                            if ("umol/L".equals(detailSub.getString("value"))) {
-                                scr = Double.valueOf(detailSub.getString("value")) / 88.41;
-                            } else {
-                                scr = Double.valueOf(detailSub.getString("value"));
-                            }
-                        }
-                    } else if ("性别".equals(detailSub.getString("name"))) {
-                        JSONArray genderDetails = detailSub.getJSONArray("details");
-                        for (int j = 0; j < genderDetails.size(); j++) {
-                            JSONObject genderDetail = genderDetails.getJSONObject(j);
-                            //返回的数据结构性别暂时是写死(默认女性)
-                            if (genderDetail.getInteger("state") == 1) {
-                                if ("男".equals(genderDetail.getString("detailName"))) {
-                                    k = 0.9f;
-                                    denger = Double.parseDouble(genderDetail.getString("value"));
-                                    if (scr <= 0.90) {
-                                        a = -0.411;
-                                    } else {
-                                        a = -1.209;
-                                    }
-                                } else if ("女".equals(genderDetail.getString("detailName"))) {
-                                    k = 0.7f;
-                                    denger = Double.parseDouble(genderDetail.getString("value"));
-                                    if (scr <= 0.70) {
-                                        a = -0.329;
-                                    } else {
-                                        a = -1.209;
-                                    }
-                                }
-                            }
-                        }
+            if(newindSet1 != null && newindSet1.size()>0){
+                for (String ind:newindSet1
+                     ) {
+                    MedicalIndication medicalIndication= new MedicalIndication();
+                    medicalIndication.setName(ind);
+                    List<MedicalIndicationDetail> ds = new ArrayList<>();
+                    MedicalIndicationDetail medicalIndicationDetail = new MedicalIndicationDetail();
+                    medicalIndicationDetail.setType(1);
+                    JSONObject jsonObject = new JSONObject();
+                    if(indLiang != null){
+                        jsonObject.put("name",indLiang.get(ind)); 
                     }
+                    medicalIndicationDetail.setContent(jsonObject);
+                    ds.add(medicalIndicationDetail);
+                    medicalIndication.setDetails(ds);
+                    idns.add(medicalIndication);
                 }
-                double eGFR3 = 141 * Math.pow((scr / k), a) * Math.pow(0.993, age) * denger;
-                String unit = "ml/min•1.73m2";
-                String text = null;
-                if (eGFR3 <= 0) {
-                    text = "指标值缺少";
-                } else if (eGFR3 > 0 && eGFR3 < 15) {
-                    text = "肾功能衰竭";
-                } else if (eGFR3 >= 15 && eGFR3 <= 29) {
-                    text = "重度下降";
-                } else if (eGFR3 > 29 && eGFR3 < 60) {
-                    text = "中度下降";
-                } else if (eGFR3 >= 60 && eGFR3 <= 89) {
-                    text = "轻度下降";
-                } else if (eGFR3 > 89) {
-                    text = "正常或肾损伤代偿期";
-                }
-
-                scaleCalcResult.put("text", text);
-                scaleCalcResult.put("unit", unit);
-                scaleCalcResult.put("value", eGFR3);
-                System.out.println("text:" + text + "\tunit:" + unit + "\tvalue:" + eGFR3);
             }
+            responseData.setMedicalIndications(idns);
         }
-
-
-        return scaleCalcResult;
+        //诊断推送
+        responseData.setDis(featureRates);
+        responseData.setInputs(searchData.getInputs());
+        System.out.println("Total takes: " + (System.currentTimeMillis()-starttime)/1000d + 's');
+        return responseData;
     }
 }

+ 1 - 5
graph-web/src/main/java/org/diagbot/graphWeb/work/HighRiskCalculate.java

@@ -1,16 +1,12 @@
 package org.diagbot.graphWeb.work;
 
-import org.diagbot.common.work.SearchData;
+import org.diagbot.common.push.bean.SearchData;
 import org.diagbot.graph.javabean.GdbResponse;
-import org.diagbot.graph.jdbc.DriverManager;
-import org.diagbot.graph.jdbc.Neo4jAPI;
 import org.diagbot.pub.api.Response;
 import org.diagbot.pub.utils.PropertiesUtil;
 import org.diagbot.pub.utils.http.HttpApi;
-import org.neo4j.driver.v1.Driver;
 
 import javax.servlet.http.HttpServletRequest;
-import java.util.LinkedHashMap;
 import java.util.Map;
 
 public class HighRiskCalculate {

+ 3 - 3
graph-web/src/main/java/org/diagbot/graphWeb/work/LisPacsCalculate.java

@@ -1,8 +1,8 @@
 package org.diagbot.graphWeb.work;
 
-import org.diagbot.common.work.FeatureRate;
-import org.diagbot.common.work.ResponseData;
-import org.diagbot.common.work.SearchData;
+import org.diagbot.common.push.bean.FeatureRate;
+import org.diagbot.common.push.bean.ResponseData;
+import org.diagbot.common.push.bean.SearchData;
 import org.diagbot.pub.api.Response;
 import org.diagbot.pub.utils.PropertiesUtil;
 import org.diagbot.pub.utils.http.HttpApi;

+ 0 - 84
graph-web/src/main/java/org/diagbot/graphWeb/work/ParamsDataProxy.java

@@ -1,84 +0,0 @@
-package org.diagbot.graphWeb.work;
-
-import org.apache.commons.beanutils.BeanUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.diagbot.common.work.SearchData;
-import org.diagbot.nlp.feature.FeatureAnalyze;
-import org.diagbot.nlp.feature.FeatureType;
-import org.diagbot.nlp.util.Constants;
-
-import javax.servlet.http.HttpServletRequest;
-import java.util.*;
-
-public class ParamsDataProxy {
-    public void createSearchData(HttpServletRequest request, SearchData searchData)throws Exception {
-        //消除空格
-        if (searchData.getSymptom() != null) {
-            searchData.setSymptom(searchData.getSymptom().trim());
-        }
-        if (searchData.getDiag() != null) {
-            searchData.setDiag(searchData.getDiag().trim());
-        }
-        //计算年龄区间
-        if (searchData.getAge() > 0) {
-            searchData.setAge_start(searchData.getAge() - 5);
-            searchData.setAge_end(searchData.getAge() + 5);
-        }
-        //默认查询门诊数据
-//        if (org.springframework.util.StringUtils.isEmpty(searchData.getResourceType())) {
-//            searchData.setResourceType(BigDataConstants.resource_type_o);
-//        }
-        //所有信息参与推送
-        searchData.setSymptom(searchData.getSymptom() + searchData.getVital()
-                + searchData.getLis() + searchData.getPacs() + searchData.getPast() + searchData.getOther() + searchData.getIndications());
-        searchData.setSymptom(searchData.getSymptom().trim());
-        //一次推送多个类别信息
-        String[] featureTypes = searchData.getFeatureType().split(",");
-        searchData.setFeatureTypes(featureTypes);
-
-        //获取入参中的特征信息
-        FeatureAnalyze fa = new FeatureAnalyze();
-
-        if (!org.springframework.util.StringUtils.isEmpty(searchData.getSymptom())) {
-            List<Map<String, Object>> featuresList = fa.start(searchData.getSymptom(), FeatureType.FEATURE);
-            paramFeatureInit(searchData, featuresList);
-
-            //如果既往史中诊断信息,需要提取这个特征
-            featuresList = fa.start(searchData.getOther(), FeatureType.DIAG);
-            paramFeatureInit(searchData, featuresList);
-        }
-
-        if (!org.springframework.util.StringUtils.isEmpty(searchData.getDiag()) && org.springframework.util.StringUtils.isEmpty(searchData.getSymptom())) {
-            List<Map<String, Object>> featuresList = fa.start(searchData.getDiag(), FeatureType.DIAG);
-            paramFeatureInit(searchData, featuresList);
-        }
-
-    }
-
-    /**
-     * 推送模型入参
-     *
-     * @param searchData
-     * @throws Exception
-     */
-    private void paramFeatureInit(SearchData searchData, List<Map<String, Object>> featuresList) throws Exception {
-        if (featuresList != null && featuresList.size() > 0) {
-//            BeanUtils.setProperty(searchData, property_list, featuresList);
-            Map<String, Object> featureMap = null;
-            for (int i = 0; i < featuresList.size(); i++) {
-                featureMap = featuresList.get(i);
-                Map<String, String> map = new HashMap<>();
-                for (Map.Entry<String, Object> entry : featureMap.entrySet()) {
-                    map.put(entry.getKey(), String.valueOf(entry.getValue()));
-                }
-                map.put("featureType", String.valueOf(featureMap.get("feature_type")));
-                map.put("featureName", String.valueOf(featureMap.get("feature_name")));
-                if (Constants.default_negative.equals(featureMap.get("negative"))) {
-                    searchData.getInputs().put(map.get("feature_name"), map);
-                } else {
-                    searchData.getFilters().put(map.get("feature_name"), map);
-                }
-            }
-        }
-    }
-}

+ 3 - 3
graph-web/src/main/resources/url.properties

@@ -1,5 +1,5 @@
-highrisk.http.url=http://192.168.2.234:5004/knowledge/getHighRisk
-lisPacs.http.url=http://192.168.2.234:5004/knowledge/getLisPacs
-disease.http.url=http://192.168.2.234:5004/knowledge/getDisease
+highrisk.http.url=http://192.168.2.186:5004/knowledge/getHighRisk
+lisPacs.http.url=http://192.168.2.186:5004/knowledge/getLisPacs
+disease.http.url=http://192.168.2.186:5004/knowledge/getDisease
 #lisPacs.http.url=http://192.168.3.9:5004/knowledge/getLisPacs
 #highrisk.http.url=http://192.168.3.9:5004/knowledge/getHighRisk

+ 5 - 1
graph/pom.xml

@@ -27,7 +27,11 @@
             <artifactId>common-service</artifactId>
             <version>1.0.0</version>
         </dependency>
-
+        <dependency>
+            <groupId>org.diagbot</groupId>
+            <artifactId>common-push</artifactId>
+            <version>1.0.0</version>
+        </dependency>
         <dependency>
             <groupId>org.diagbot</groupId>
             <artifactId>nlp</artifactId>

+ 0 - 0
graph/src/main/java/org/diagbot/graph/jdbc/Neo4jAPI.java


Some files were not shown because too many files changed in this diff