Sfoglia il codice sorgente

153服务器故障,代码重新迁移

louhr 6 anni fa
parent
commit
e79d701693
100 ha cambiato i file con 180338 aggiunte e 0 eliminazioni
  1. 26 0
      .gitignore
  2. 43 0
      algorithm/pom.xml
  3. 14 0
      algorithm/src/main/java/org/algorithm/core/AlgorithmExecutor.java
  4. 5 0
      algorithm/src/main/java/org/algorithm/core/DataSet.java
  5. 111 0
      algorithm/src/main/java/org/algorithm/core/bayes/AlgorithmBayesExecutor.java
  6. 21 0
      algorithm/src/main/java/org/algorithm/core/cnn/AlgorithmCNNExecutor.java
  7. 67 0
      algorithm/src/main/java/org/algorithm/core/cnn/model/Lemma.java
  8. 38 0
      algorithm/src/main/java/org/algorithm/core/cnn/model/Triad.java
  9. 29 0
      algorithm/src/main/java/org/algorithm/core/neural/AlgorithmNeuralExecutor.java
  10. 21 0
      algorithm/src/main/java/org/algorithm/core/neural/DiagnosisPredictExecutor.java
  11. 22 0
      algorithm/src/main/java/org/algorithm/core/neural/DiagnosisToLisExecutor.java
  12. 22 0
      algorithm/src/main/java/org/algorithm/core/neural/DiagnosisToPacsExecutor.java
  13. 21 0
      algorithm/src/main/java/org/algorithm/core/neural/DiagnosisToSymptomExecutor.java
  14. 22 0
      algorithm/src/main/java/org/algorithm/core/neural/DiagnosisToVitalExecutor.java
  15. 23 0
      algorithm/src/main/java/org/algorithm/core/neural/LisPredictExecutor.java
  16. 22 0
      algorithm/src/main/java/org/algorithm/core/neural/PacsPredictExecutor.java
  17. 22 0
      algorithm/src/main/java/org/algorithm/core/neural/SymptomPredictExecutor.java
  18. 44 0
      algorithm/src/main/java/org/algorithm/core/neural/TensorFlowModelLoadFactory.java
  19. 105 0
      algorithm/src/main/java/org/algorithm/core/neural/TensorflowModel.java
  20. 22 0
      algorithm/src/main/java/org/algorithm/core/neural/VitalPredictExecutor.java
  21. 89 0
      algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSet.java
  22. 171 0
      algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSetImpl.java
  23. 179 0
      algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSetImplNonParallel.java
  24. 72 0
      algorithm/src/main/java/org/algorithm/core/neural/neuroph/NeurophLearning.java
  25. 89 0
      algorithm/src/main/java/org/algorithm/factory/AlgorithmFactory.java
  26. 54 0
      algorithm/src/main/java/org/algorithm/test/HelloTF.java
  27. 67 0
      algorithm/src/main/java/org/algorithm/test/NNDataSetImplNonPatallelTest.java
  28. 40 0
      algorithm/src/main/java/org/algorithm/test/NNDataSetImplTest.java
  29. 96 0
      algorithm/src/main/java/org/algorithm/test/TensorflowExcutorTest.java
  30. 23 0
      algorithm/src/main/java/org/algorithm/test/Test.java
  31. 65 0
      algorithm/src/main/java/org/algorithm/util/AlgorithmClassify.java
  32. 103 0
      algorithm/src/main/java/org/algorithm/util/MysqlConnector.java
  33. 49 0
      algorithm/src/main/java/org/algorithm/util/TextFileReader.java
  34. 36 0
      algorithm/src/main/java/org/algorithm/util/Utils.java
  35. 17 0
      algorithm/src/main/resources/algorithm.properties
  36. 174891 0
      algorithm/src/main/resources/dictionaries.bin
  37. 25 0
      bigdata-web/.gitignore
  38. BIN
      bigdata-web/.mvn/wrapper/maven-wrapper.jar
  39. 1 0
      bigdata-web/.mvn/wrapper/maven-wrapper.properties
  40. 286 0
      bigdata-web/mvnw
  41. 161 0
      bigdata-web/mvnw.cmd
  42. 125 0
      bigdata-web/pom.xml
  43. 18 0
      bigdata-web/src/main/java/org/diagbot/BigdataWebApplication.java
  44. 128 0
      bigdata-web/src/main/java/org/diagbot/bigdata/common/ApplicationCacheUtil.java
  45. 97 0
      bigdata-web/src/main/java/org/diagbot/bigdata/common/InitListener.java
  46. 47 0
      bigdata-web/src/main/java/org/diagbot/bigdata/common/RegionInterceptor.java
  47. 24 0
      bigdata-web/src/main/java/org/diagbot/bigdata/config/InterceptorConfig.java
  48. 41 0
      bigdata-web/src/main/java/org/diagbot/bigdata/config/MybatisConfiguration.java
  49. 41 0
      bigdata-web/src/main/java/org/diagbot/bigdata/controller/AlgorithmController.java
  50. 22 0
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/FeatureMappingMapper.java
  51. 22 0
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingDiagMapper.java
  52. 22 0
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingFilterMapper.java
  53. 38 0
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/FeatureMapping.java
  54. 38 0
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingDiag.java
  55. 68 0
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingFilter.java
  56. 7 0
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/FeatureMappingWrapper.java
  57. 12 0
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingDiagWrapper.java
  58. 6 0
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingFilterWrapper.java
  59. 77 0
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/FeatureMappingMapper.xml
  60. 55 0
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingDiagMapper.xml
  61. 67 0
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingFilterMapper.xml
  62. 8 0
      bigdata-web/src/main/java/org/diagbot/bigdata/service/FeatureMappingService.java
  63. 8 0
      bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingDiagService.java
  64. 8 0
      bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingFilterService.java
  65. 21 0
      bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/FeatureMappingServiceImpl.java
  66. 21 0
      bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingDiagServiceImpl.java
  67. 21 0
      bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingFilterServiceImpl.java
  68. 38 0
      bigdata-web/src/main/java/org/diagbot/bigdata/util/BigDataConstants.java
  69. 88 0
      bigdata-web/src/main/java/org/diagbot/bigdata/work/AlgorithmCore.java
  70. 43 0
      bigdata-web/src/main/java/org/diagbot/bigdata/work/FeatureRate.java
  71. 180 0
      bigdata-web/src/main/java/org/diagbot/bigdata/work/ParamsDataProxy.java
  72. 96 0
      bigdata-web/src/main/java/org/diagbot/bigdata/work/ResponseData.java
  73. 194 0
      bigdata-web/src/main/java/org/diagbot/bigdata/work/ResultDataProxy.java
  74. 196 0
      bigdata-web/src/main/java/org/diagbot/bigdata/work/SearchData.java
  75. 38 0
      bigdata-web/src/main/resources/application.yml
  76. 17 0
      bigdata-web/src/test/java/org/diagbot/BigdataWebApplicationTests.java
  77. 30 0
      bigdata/pom.xml
  78. 33 0
      common-service/pom.xml
  79. 27 0
      common-service/src/main/java/org/diagbot/common/dao/mapper/StandardInfoMapper.java
  80. 95 0
      common-service/src/main/java/org/diagbot/common/dao/model/StandardInfo.java
  81. 6 0
      common-service/src/main/java/org/diagbot/common/dao/model/wrapper/StandardInfoWrapper.java
  82. 150 0
      common-service/src/main/java/org/diagbot/common/dao/xml/StandardInfoMapper.xml
  83. 13 0
      common-service/src/main/java/org/diagbot/common/service/StandardInfoService.java
  84. 31 0
      common-service/src/main/java/org/diagbot/common/service/impl/StandardInfoServiceImpl.java
  85. 25 0
      graph-web/.gitignore
  86. BIN
      graph-web/.mvn/wrapper/maven-wrapper.jar
  87. 1 0
      graph-web/.mvn/wrapper/maven-wrapper.properties
  88. 286 0
      graph-web/mvnw
  89. 161 0
      graph-web/mvnw.cmd
  90. 83 0
      graph-web/pom.xml
  91. 14 0
      graph-web/src/main/java/org/diagbot/graph/GraphWebApplication.java
  92. 38 0
      graph-web/src/main/resources/application.yml
  93. 50 0
      graph/pom.xml
  94. 16 0
      graph/src/main/java/org/diagbot/graph/annotation/FromProperty.java
  95. 14 0
      graph/src/main/java/org/diagbot/graph/annotation/RelationName.java
  96. 16 0
      graph/src/main/java/org/diagbot/graph/annotation/ToProperty.java
  97. 35 0
      graph/src/main/java/org/diagbot/graph/javabean/Drugs.java
  98. 15 0
      graph/src/main/java/org/diagbot/graph/javabean/Filnlly.java
  99. 53 0
      graph/src/main/java/org/diagbot/graph/javabean/Medicition.java
  100. 0 0
      graph/src/main/java/org/diagbot/graph/jdbc/DriverManager.java

+ 26 - 0
.gitignore

@@ -0,0 +1,26 @@
+# ---> Idea
+.idea/
+build/
+classes/
+*.iml
+
+# ---> eclipse
+.project
+.classpath
+.settings/
+.externalToolBuilders/
+bin/
+
+# ---> Java
+*.class
+
+logs/
+target/
+
+# Mobile Tools for Java (J2ME)
+.mtj.tmp/
+
+# Package Files #
+*.jar
+*.war
+*.ear

+ 43 - 0
algorithm/pom.xml

@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>push</artifactId>
+        <groupId>org.diagbot</groupId>
+        <version>1.0.0</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>algorithm</artifactId>
+
+    <name>algorithm</name>
+    <!-- FIXME change it to the project's website -->
+    <url>http://www.example.com</url>
+
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.diagbot</groupId>
+            <artifactId>public</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.tensorflow</groupId>
+            <artifactId>tensorflow</artifactId>
+            <version>1.8.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.beykery</groupId>
+            <artifactId>neuroph</artifactId>
+            <version>2.92</version>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <finalName>algorithm</finalName>
+    </build>
+</project>

+ 14 - 0
algorithm/src/main/java/org/algorithm/core/AlgorithmExecutor.java

@@ -0,0 +1,14 @@
+package org.algorithm.core;
+
+import java.util.Map;
+
+
+/**
+ * @Author: fyeman
+ * @Date: 2018/7/19/019 10:29
+ * @Description:
+ */
+public abstract class AlgorithmExecutor {
+
+    public abstract Map<String, Float> execute(Map<String, Map<String, String>> inputs);
+}

+ 5 - 0
algorithm/src/main/java/org/algorithm/core/DataSet.java

@@ -0,0 +1,5 @@
+package org.algorithm.core;
+
+public interface DataSet {
+
+}

+ 111 - 0
algorithm/src/main/java/org/algorithm/core/bayes/AlgorithmBayesExecutor.java

@@ -0,0 +1,111 @@
+package org.algorithm.core.bayes;
+
+import org.algorithm.core.AlgorithmExecutor;
+
+import javax.servlet.http.HttpServletRequest;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @Author: fyeman
+ * @Date: 2018/7/19/019 11:21
+ * @Description:
+ */
+public class AlgorithmBayesExecutor extends AlgorithmExecutor {
+    private Map<String, List<String>> rdnFeaturesMap = null;
+
+    private List<String> symptomsList = null;
+
+    /**每个特征在文档中出现计数情况*/
+    private Map<String, Float> allFeatureCntMap = new HashMap<>(10, 0.8f);
+    /**录入条件特征在文档中出现的情况*/
+    private Map<String, Float> searchFeatureCntMap = new HashMap<>(10, 0.8f);
+    /**总文档计数*/
+    private int rdnCnt = 0;
+
+    public Map<String, Float> start(HttpServletRequest request, String[] symptoms) {
+        this.featureThreshold(rdnFeaturesMap, symptoms);
+
+        Map<String, Float> result = new HashMap<String, Float>();
+
+        //计算贝叶斯分母 p_s = P(S1,S2|D1) + P(S1,S2|D2) + P(S1,S2|D3) ......        (p_d * p_s_i) / p_s
+        float p_s = 0.0f;
+        float p_d = 0.0f;
+        float p_s_i = 0.0f;
+        for (Map.Entry<String, Float> entry : searchFeatureCntMap.entrySet()) {
+            p_d = allFeatureCntMap.get(entry.getKey()) / rdnCnt;
+            p_s_i = entry.getValue() / allFeatureCntMap.get(entry.getKey());
+            p_s += p_d * p_s_i;
+        }
+
+        for (Map.Entry<String, Float> entry : searchFeatureCntMap.entrySet()) {
+            p_d = allFeatureCntMap.get(entry.getKey()) / rdnCnt;
+            p_s_i = entry.getValue() / allFeatureCntMap.get(entry.getKey());
+            result.put(entry.getKey(), (p_d * p_s_i) / p_s);
+        }
+        return result;
+    }
+
+    private void featureThreshold(Map<String, List<String>> rdnMap, String[] symptoms) {
+        this.rdnCnt = rdnMap.size();
+
+        List<String> featuresList = null;
+        for (Map.Entry<String, List<String>> entry : rdnMap.entrySet()) {
+            featuresList = rdnMap.get(entry.getKey());
+            /**保存特征出现的文档数*/
+            this.count(this.allFeatureCntMap, featuresList);
+
+            if (findFeature(entry.getKey(), symptoms)) {
+                this.count(searchFeatureCntMap, featuresList);
+            }
+        }
+    }
+
+
+    public boolean findFeature(String rdn, String[] symptoms) {
+        //入参有症状信息,必须包含入参中所有症状信息的文档才计算
+        if (!matchFeatures(rdnFeaturesMap, symptomsList, symptoms, rdn)) {
+            return false;
+        }
+        return true;
+    }
+
+    private boolean matchFeatures(Map<String, List<String>> featuresMap, List<String> docFeaturesList, String[] args, String rdn) {
+        if (args != null && args.length > 0) {
+            docFeaturesList = featuresMap.get(rdn);
+            if (docFeaturesList == null || docFeaturesList.size() == 0) {
+                return false;
+            } else {
+                //主症状不相同
+                if (args.length > 0 && !args[0].equals(docFeaturesList.get(0))) {
+                    return false;
+                }
+                for (String arg : args) {
+                    if (arg != null && !docFeaturesList.contains(arg)) {
+                        return false;
+                    }
+                }
+            }
+        }
+        return true;
+    }
+
+    private void count(Map<String, Float> cntMap, List<String> featuresList) {
+        for (String feature : featuresList) {
+            if (cntMap.get(feature) == null) {
+                cntMap.put(feature, 1f);
+            } else {
+                cntMap.put(feature, cntMap.get(feature) + 1);
+            }
+        }
+    }
+
+    public Map<String, Float> execute(Map<String, Map<String, String>> inputs) {
+        return null;
+    }
+
+    public void setRdnFeaturesMap(Map<String, List<String>> rdnFeaturesMap) {
+        this.rdnFeaturesMap = rdnFeaturesMap;
+    }
+}

+ 21 - 0
algorithm/src/main/java/org/algorithm/core/cnn/AlgorithmCNNExecutor.java

@@ -0,0 +1,21 @@
+package org.algorithm.core.cnn;
+
+import org.algorithm.core.cnn.model.Triad;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * @ClassName org.algorithm.core.cnn.model.AlgorithmCNNExecutor
+ * @Description TODO
+ * @Author fyeman
+ * @Date 2019/1/17/017 19:18
+ * @Version 1.0
+ **/
+public class AlgorithmCNNExecutor {
+    public List<Triad> execute(String content) {
+        List<Triad> triads = new ArrayList<>();
+        triads.add(new Triad());
+        return triads;
+    }
+}

+ 67 - 0
algorithm/src/main/java/org/algorithm/core/cnn/model/Lemma.java

@@ -0,0 +1,67 @@
+package org.algorithm.core.cnn.model;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * @ClassName org.algorithm.core.cnn.model.Lemma
+ * @Description 词元信息
+ * @Author fyeman
+ * @Date 2019/1/17/017 19:15
+ * @Version 1.0
+ **/
+public class Lemma {
+    private String text;
+    private String position;
+    private int len;
+    private String property;
+
+    private List<Lemma> relationLemmas;
+
+    public String getText() {
+        return text;
+    }
+
+    public void setText(String text) {
+        this.text = text;
+    }
+
+    public String getPosition() {
+        return position;
+    }
+
+    public void setPosition(String position) {
+        this.position = position;
+    }
+
+    public int getLen() {
+        return len;
+    }
+
+    public void setLen(int len) {
+        this.len = len;
+    }
+
+    public String getProperty() {
+        return property;
+    }
+
+    public void setProperty(String property) {
+        this.property = property;
+    }
+
+    public List<Lemma> getRelationLemmas() {
+        return relationLemmas;
+    }
+
+    public void setRelationLemmas(List<Lemma> relationLemmas) {
+        this.relationLemmas = relationLemmas;
+    }
+
+    public void add(Lemma l) {
+        if (relationLemmas == null) {
+            relationLemmas = new ArrayList<>();
+        }
+        relationLemmas.add(l);
+    }
+}

+ 38 - 0
algorithm/src/main/java/org/algorithm/core/cnn/model/Triad.java

@@ -0,0 +1,38 @@
+package org.algorithm.core.cnn.model;
+
+/**
+ * @ClassName org.algorithm.core.cnn.Triad
+ * @Description 三元组关系
+ * @Author fyeman
+ * @Date 2019/1/17/017 19:14
+ * @Version 1.0
+ **/
+public class Triad {
+    private Lemma l_1;
+    private Lemma l_2;
+    private String relation;
+
+    public Lemma getL_1() {
+        return l_1;
+    }
+
+    public void setL_1(Lemma l_1) {
+        this.l_1 = l_1;
+    }
+
+    public Lemma getL_2() {
+        return l_2;
+    }
+
+    public void setL_2(Lemma l_2) {
+        this.l_2 = l_2;
+    }
+
+    public String getRelation() {
+        return relation;
+    }
+
+    public void setRelation(String relation) {
+        this.relation = relation;
+    }
+}

+ 29 - 0
algorithm/src/main/java/org/algorithm/core/neural/AlgorithmNeuralExecutor.java

@@ -0,0 +1,29 @@
+package org.algorithm.core.neural;
+
+import org.algorithm.core.AlgorithmExecutor;
+
+import java.util.Map;
+
+/**
+ * @Author: fyeman
+ * @Date: 2018/7/19/019 11:20
+ * @Description:
+ */
+public abstract class AlgorithmNeuralExecutor extends AlgorithmExecutor {
+    TensorflowModel model = null;
+
+    /**
+     * 执行
+     */
+    public Map<String, Float> execute(Map<String, Map<String, String>> inputs) {
+        return this.model.execute(inputs);   
+    }
+    
+    /**
+     * 关闭模型,释放资源
+     */
+    public void close() {
+        this.model.close();
+    }
+    
+}

+ 21 - 0
algorithm/src/main/java/org/algorithm/core/neural/DiagnosisPredictExecutor.java

@@ -0,0 +1,21 @@
+package org.algorithm.core.neural;
+
+/**
+ * 门诊诊断预测
+ * 
+ * @Author: bijl
+ * @Date: 2018年7月26日-下午2:32:19
+ * @Description:
+ */
+public class DiagnosisPredictExecutor extends AlgorithmNeuralExecutor {
+
+    /**
+     * 加载模型和数据集
+     */
+    public DiagnosisPredictExecutor() {
+        String modelVersion = "diagnosisPredict.version";
+
+        this.model = TensorFlowModelLoadFactory.create(modelVersion);
+    }
+
+}

+ 22 - 0
algorithm/src/main/java/org/algorithm/core/neural/DiagnosisToLisExecutor.java

@@ -0,0 +1,22 @@
+package org.algorithm.core.neural;
+
+
+/**
+ * 门诊诊断预测
+ * 
+ * @Author: bijl
+ * @Date: 2018年7月26日-下午2:32:19
+ * @Description:
+ */
+public class DiagnosisToLisExecutor extends AlgorithmNeuralExecutor {
+
+    /**
+     * 加载模型和数据集
+     */
+    public DiagnosisToLisExecutor() {
+        String modelVersion = "diagnosisToLis.version";
+
+        this.model = TensorFlowModelLoadFactory.create(modelVersion);
+    }
+
+}

+ 22 - 0
algorithm/src/main/java/org/algorithm/core/neural/DiagnosisToPacsExecutor.java

@@ -0,0 +1,22 @@
+package org.algorithm.core.neural;
+
+
+/**
+ * 门诊诊断预测
+ * 
+ * @Author: bijl
+ * @Date: 2018年7月26日-下午2:32:19
+ * @Description:
+ */
+public class DiagnosisToPacsExecutor extends AlgorithmNeuralExecutor {
+
+    /**
+     * 加载模型和数据集
+     */
+    public DiagnosisToPacsExecutor() {
+        String modelVersion = "diagnosisToPacs.version";
+
+        this.model = TensorFlowModelLoadFactory.create(modelVersion);
+    }
+
+}

+ 21 - 0
algorithm/src/main/java/org/algorithm/core/neural/DiagnosisToSymptomExecutor.java

@@ -0,0 +1,21 @@
+package org.algorithm.core.neural;
+
+/**
+ * 门诊诊断预测
+ * @Author: bijl
+ * @Date: 2018年7月26日-下午2:32:19
+ * @Description:
+ */
+public class DiagnosisToSymptomExecutor extends AlgorithmNeuralExecutor{
+
+    /**
+     * 加载模型和数据集
+     */
+    public DiagnosisToSymptomExecutor() {        
+        String modelVersion = "diagnosisToSymptom.version";
+                
+        this.model = TensorFlowModelLoadFactory.create(modelVersion);
+        
+    }
+
+}

+ 22 - 0
algorithm/src/main/java/org/algorithm/core/neural/DiagnosisToVitalExecutor.java

@@ -0,0 +1,22 @@
+package org.algorithm.core.neural;
+
+
+/**
+ * 门诊诊断预测
+ * @Author: bijl
+ * @Date: 2018年7月26日-下午2:32:19
+ * @Description:
+ */
+public class DiagnosisToVitalExecutor extends AlgorithmNeuralExecutor{
+
+    /**
+     * 加载模型和数据集
+     */
+    public DiagnosisToVitalExecutor() {
+        String modelVersion = "diagnosisToVital.version";
+        
+        this.model = TensorFlowModelLoadFactory.create(modelVersion);
+        
+    }
+
+}

+ 23 - 0
algorithm/src/main/java/org/algorithm/core/neural/LisPredictExecutor.java

@@ -0,0 +1,23 @@
+package org.algorithm.core.neural;
+
+
+/**
+ * 门诊诊断预测
+ * 
+ * @Author: bijl
+ * @Date: 2018年7月26日-下午2:32:19
+ * @Description:
+ */
+public class LisPredictExecutor extends AlgorithmNeuralExecutor {
+
+    /**
+     * 加载模型和数据集
+     */
+    public LisPredictExecutor() {
+        String modelVersion = "lisPredict.version";
+
+        this.model = TensorFlowModelLoadFactory.create(modelVersion);
+    }
+
+
+}

+ 22 - 0
algorithm/src/main/java/org/algorithm/core/neural/PacsPredictExecutor.java

@@ -0,0 +1,22 @@
+package org.algorithm.core.neural;
+
+
+/**
+ * 门诊诊断预测
+ * 
+ * @Author: bijl
+ * @Date: 2018年7月26日-下午2:32:19
+ * @Description:
+ */
+public class PacsPredictExecutor extends AlgorithmNeuralExecutor {
+
+    /**
+     * 加载模型和数据集
+     */
+    public PacsPredictExecutor() {
+        String modelVersion = "pacsPredict.version";
+
+        this.model = TensorFlowModelLoadFactory.create(modelVersion);
+    }
+
+}

+ 22 - 0
algorithm/src/main/java/org/algorithm/core/neural/SymptomPredictExecutor.java

@@ -0,0 +1,22 @@
+package org.algorithm.core.neural;
+
+
+/**
+ * 门诊诊断预测
+ * 
+ * @Author: bijl
+ * @Date: 2018年7月26日-下午2:32:19
+ * @Description:
+ */
+public class SymptomPredictExecutor extends AlgorithmNeuralExecutor {
+
+    /**
+     * 加载模型和数据集
+     */
+    public SymptomPredictExecutor() {
+        String modelVersion = "symptomPredict.version";
+
+        this.model = TensorFlowModelLoadFactory.create(modelVersion);
+    }
+
+}

+ 44 - 0
algorithm/src/main/java/org/algorithm/core/neural/TensorFlowModelLoadFactory.java

@@ -0,0 +1,44 @@
+package org.algorithm.core.neural;
+
+import org.algorithm.core.neural.dataset.NNDataSet;
+import org.algorithm.core.neural.dataset.NNDataSetImpl;
+import org.diagbot.pub.utils.PropertiesUtil;
+
+/**
+ * Tensorflow 模型加载工厂
+ * @Author: bijl
+ * @Date: 2018年7月19日-下午7:28:58
+ * @Description:
+ */
+public class TensorFlowModelLoadFactory {
+    
+    /**
+     * 加载并创建模型类
+     * @param modelVersion  模型版本配置键(用于从 algorithm.properties 读取模型路径)
+     * @return 模型
+     */
+    public static TensorflowModel create(String modelVersion) {
+        
+        
+        PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
+        
+        String inputOpName = "X";  // 统一输入op名称
+        String outputOpName = "softmax/softmax";  // 统一输出op名称
+        
+        // TODO:修改的地方
+//        NNDataSet dataSet = new NNDataSetImplNonParallel(modelVersion);  // 新模型
+        NNDataSet dataSet = new NNDataSetImpl(modelVersion);  // 老模型
+        
+        String modelPath =prop.getProperty("basicPath");  // 模型基本路径
+        modelVersion = prop.getProperty(modelVersion);
+        modelPath = modelPath.replace("model_version_replacement", modelVersion);  // 生成模型路径
+        
+        TensorflowModel tm = new TensorflowModel(modelPath, inputOpName, outputOpName,
+                dataSet);
+        return tm;
+    }
+
+}

+ 105 - 0
algorithm/src/main/java/org/algorithm/core/neural/TensorflowModel.java

@@ -0,0 +1,105 @@
+package org.algorithm.core.neural;
+
+import org.algorithm.core.neural.dataset.NNDataSet;
+import org.tensorflow.SavedModelBundle;
+import org.tensorflow.Session;
+import org.tensorflow.Tensor;
+
+import java.nio.FloatBuffer;
+import java.util.Map;
+
+/**
+ * tensorflow 模型类,要求单个样本是1维向量,而不是高维向量
+ * @Author: bijl
+ * @Date: 2018年7月19日-下午7:21:24
+ * @Description:
+ */
+public class TensorflowModel {
+    
+    private final String INPUT_OPERATION_NAME;   // 输入op的名称
+    private final String OUTPUT_OPERATION_NAME;  // 输出op的名称
+    private final int NUM_FEATURE;  // 特征个数
+    private final int NUM_LABEL;  //  标签(类别)个数
+    private SavedModelBundle bundle; // 模型捆绑
+    private Session session;  // 会话
+    private NNDataSet dataSet;  // 数据集
+    
+    /**
+     * 
+     * @param exportDir  模型保存地址
+     * @param inputOpName  输入op的名称
+     * @param outputOpName  输出op的名称
+     * @param dataSet  模型使用的数据集
+     */
+    public TensorflowModel(String exportDir, String inputOpName, String outputOpName, NNDataSet dataSet) {
+        this.INPUT_OPERATION_NAME = inputOpName;
+        this.OUTPUT_OPERATION_NAME = outputOpName;
+        this.dataSet = dataSet;
+        this.NUM_FEATURE = this.dataSet.getNumFeature();
+        this.NUM_LABEL = this.dataSet.getNumLabel();
+        this.init(exportDir);
+                
+    }
+    
+    /**
+     * 初始化:加载模型,获取会话。
+     * @param exportDir
+     */
+    public void init(String exportDir) {
+        /* load the model Bundle */
+        try {
+            this.bundle = SavedModelBundle.load(exportDir, "serve");
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // create the session from the Bundle
+        this.session = bundle.session(); 
+    }
+    
+    /**
+     * 运行模型
+     * @param inputValues  输入值
+     * @param numExamples  样本个数
+     * @return 模型的输出
+     */
+    private float[][] run(float[] inputValues, int numExamples){
+        // TODO: 修改的地方
+//        long[] inputShape = {numExamples, this.NUM_FEATURE, 4, 1};  // 新模型
+        long[] inputShape = {numExamples, this.NUM_FEATURE};  // 老模型
+        Tensor<Float> inputTensor = Tensor.create(
+                inputShape,  
+                FloatBuffer.wrap(inputValues) 
+        );
+        return this.session.runner().feed(this.INPUT_OPERATION_NAME, inputTensor)
+                .feed("keep_prob", Tensor.create(1.0f, Float.class))  // dropout保留率
+                .fetch(this.OUTPUT_OPERATION_NAME).run().get(0)
+                .copyTo(new float[numExamples][this.NUM_LABEL]);
+    }
+    
+    
+    /**
+     * 运行模型,并将结果打包成目标格式
+     */
+    public Map<String, Float> execute(Map<String, Map<String, String>> inputs) {
+        float[] inputValues = this.dataSet.toFeatureVector(inputs);
+        float sum = 0;
+        for (float f : inputValues)
+            sum += f;
+        if(sum == 0)  // 如果输入没有有效特征,则直接返回null
+            return null;
+        
+        float[][] predict = this.run(inputValues, 1);  // 一次一个样本
+        return this.dataSet.wrap(predict);  
+    }
+    
+    
+    /**
+     * 关闭会话,释放资源
+     */
+    public void close() {
+        this.session.close();
+        this.bundle.close();
+    }
+
+}

+ 22 - 0
algorithm/src/main/java/org/algorithm/core/neural/VitalPredictExecutor.java

@@ -0,0 +1,22 @@
+package org.algorithm.core.neural;
+
+
+/**
+ * 门诊诊断预测
+ * 
+ * @Author: bijl
+ * @Date: 2018年7月26日-下午2:32:19
+ * @Description:
+ */
+public class VitalPredictExecutor extends AlgorithmNeuralExecutor {
+
+    /**
+     * 加载模型和数据集
+     */
+    public VitalPredictExecutor() {
+        String modelVersion = "vitalPredict.version";
+
+        this.model = TensorFlowModelLoadFactory.create(modelVersion);
+    }
+
+}

+ 89 - 0
algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSet.java

@@ -0,0 +1,89 @@
+package org.algorithm.core.neural.dataset;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * 神经网络用数据处理模块
+ * @Author: bijl
+ * @Date: 2018年7月20日-下午4:01:34
+ * @Description:
+ */
+public abstract class NNDataSet {
+    protected final int NUM_FEATURE;
+    private final int NUM_LABEL;
+    protected final Map<String, Integer> FEATURE_DICT = new HashMap<>();
+    
+    // 新版本新加的三种关键词
+    protected final Map<String, Integer> PARTBODY_DICT = new HashMap<>();
+    protected final Map<String, Integer> PROPERTY_DICT = new HashMap<>();
+    protected final Map<String, Integer> DURATION_DICT = new HashMap<>();
+    
+    protected final Map<String, Integer> LABEL_DICT = new HashMap<>();
+    protected final Map<String, Integer> NEGATIVE_DICT = new HashMap<>();
+    private final String[] FEATURE_DICT_ARRAY;
+    private final String[] LABEL_DICT_ARRAY;
+
+
+    public NNDataSet(String modelAndVersion) {
+        this.readDict(modelAndVersion);
+        this.NUM_FEATURE = this.FEATURE_DICT.size();
+        this.NUM_LABEL = this.LABEL_DICT.size();
+        this.FEATURE_DICT_ARRAY = new String[this.NUM_FEATURE];
+        this.LABEL_DICT_ARRAY = new String[this.NUM_LABEL];
+        this.makeDictArr();
+    }
+    
+    /**
+     * 装外部输入转为特征向量
+     * @param inputs
+     * @return
+     */
+    public abstract float[] toFeatureVector(Map<String, Map<String, String>> inputs);
+
+    /**
+     * 读取特征和类别字典
+     */
+    public abstract void readDict(String modelAndVersion);
+    
+    /**
+     * 生成字典列表
+     */
+    private void makeDictArr() {
+        for (Map.Entry<String, Integer> entry : this.FEATURE_DICT.entrySet()) 
+            this.FEATURE_DICT_ARRAY[entry.getValue()] = entry.getKey();
+        
+        for (Map.Entry<String, Integer> entry : this.LABEL_DICT.entrySet()) 
+            this.LABEL_DICT_ARRAY[entry.getValue()] = entry.getKey();
+        
+    }
+
+    /**
+     * 打包模型输出结果给调用者
+     * 
+     * @param predict 模型输出
+     * @return
+     */
+    public Map<String, Float> wrap(float[][] predict) {
+        Map<String, Float> result = new HashMap<>();
+        for (int i=0; i<predict[0].length; i++) {  // 只返回一维向量
+            result.put(this.LABEL_DICT_ARRAY[i], predict[0][i]);
+        }
+        return result;
+    }
+
+    /**
+     * @return
+     */
+    public int getNumFeature() {
+        return this.NUM_FEATURE;
+    }
+
+    /**
+     * @return
+     */
+    public int getNumLabel() {
+        return this.NUM_LABEL;
+    }
+
+}

+ 171 - 0
algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSetImpl.java

@@ -0,0 +1,171 @@
+package org.algorithm.core.neural.dataset;
+
+import org.algorithm.util.TextFileReader;
+import org.diagbot.pub.utils.PropertiesUtil;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+/**
+ * Data set used for the outpatient diagnosis-push model.
+ *
+ * @Author: bijl
+ * @Date: 2018年7月26日-上午10:19:43
+ * @Description:
+ */
+public class NNDataSetImpl extends NNDataSet {
+
+    /**
+     * @param modelAndVersion property key identifying the model and version
+     */
+    public NNDataSetImpl(String modelAndVersion) {
+        super(modelAndVersion);
+    }
+
+    /**
+     * Converts the external input into the model's feature vector.
+     *
+     * @param inputs feature name -> attribute map; only the "negative"
+     *               attribute is used here (looked up in NEGATIVE_DICT and
+     *               expected to yield 1 or -1)
+     * @return vector of length NUM_FEATURE holding +1/-1 at the positions of
+     *         recognised features and 0 elsewhere
+     */
+    @Override
+    public float[] toFeatureVector(Map<String, Map<String, String>> inputs) {
+        float[] featureVector = new float[this.NUM_FEATURE];
+
+        final float positiveValue = 1.0f;
+        final float negativeValue = -1.0f;
+
+        for (Entry<String, Map<String, String>> entry : inputs.entrySet()) {
+            Map<String, String> featureValues = entry.getValue();
+            Integer position = this.FEATURE_DICT.get(entry.getKey());
+            Integer negative = NEGATIVE_DICT.get(featureValues.get("negative"));
+
+            if (position == null)  // unknown feature: ignore
+                continue;
+
+            // FIX: the previous code unboxed 'negative' directly in the
+            // comparison, throwing a NullPointerException whenever the
+            // negation word was missing from NEGATIVE_DICT; treat null like
+            // any other unexpected flag value.
+            int flag = (negative == null) ? 0 : negative.intValue();
+            if (flag == 1)
+                featureVector[position] = positiveValue;
+            else if (flag == -1)
+                featureVector[position] = negativeValue;
+            else
+                System.out.println("New Nagetive! This may lead to an error.");
+        }
+
+        return featureVector;
+    }
+
+    /**
+     * Loads the feature/label/negation dictionaries for the configured model
+     * version from the shared "dictionaries.bin" file (a '|'-separated text
+     * file whose first line is a header).
+     *
+     * File columns: name | index | type_id | model_version.
+     * type_id 1 -> feature, 2 -> label, 8 -> negation flag.
+     */
+    @Override
+    public void readDict(String modelAndVersion) {
+
+        PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
+        String model_version = prop.getProperty(modelAndVersion);
+
+        String filePath = prop.getProperty("basicPath");  // base directory
+        filePath = filePath.substring(0, filePath.indexOf("model_version_replacement"));
+
+        filePath = filePath + "dictionaries.bin";  // dictionary file location
+
+        List<String> lines = TextFileReader.readLines(filePath);
+
+        boolean firstLine = true;
+        for (String line : lines) {
+            if (firstLine) {  // skip the header line
+                firstLine = false;
+                continue;
+            }
+
+            String[] temp = line.split("\\|");
+            if (temp[3].equals(model_version)) {
+                int type_id = Integer.parseInt(temp[2]);
+                int _index = Integer.parseInt(temp[1]);
+                String _name = temp[0];
+
+                if (type_id == 1)
+                    this.FEATURE_DICT.put(_name, _index);
+                else if (type_id == 2)
+                    this.LABEL_DICT.put(_name, _index);
+                else if (type_id == 8)
+                    this.NEGATIVE_DICT.put(_name, _index);
+            }
+        }
+
+        System.out.println("feature size:" + this.FEATURE_DICT.size());
+    }
+
+}

+ 179 - 0
algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSetImplNonParallel.java

@@ -0,0 +1,179 @@
+package org.algorithm.core.neural.dataset;
+
+import org.algorithm.util.TextFileReader;
+import org.diagbot.pub.utils.PropertiesUtil;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+/**
+ * 神经网络数据集,关键词非平行
+ * 
+ * @Author: bijl
+ * @Date: 2018年7月26日-上午10:19:43
+ * @Description:
+ */
+public class NNDataSetImplNonParallel extends NNDataSet {
+
+
+    public NNDataSetImplNonParallel(String modelAndVersion) {
+        super(modelAndVersion);
+    }
+
+    
+    @Override
+    public float[] toFeatureVector(Map<String, Map<String, String>> inputs) {
+        // inputs {症状名:{partbody:部位名, property:属性名, duration:时间类别, sex:性别值, age:年龄值}
+        
+        int numCols = 4;
+        float[][] featureVector = new float[this.NUM_FEATURE][numCols];
+        float[] results = new float[this.NUM_FEATURE * numCols];
+        
+        // 初始化featureVector
+        for (int i=0; i<this.NUM_FEATURE; i++)
+            for (int j=0; j<numCols; j++)
+                featureVector[i][j] = 0.0f;
+
+        Iterator<Entry<String, Map<String, String>>> entries = inputs.entrySet().iterator();
+
+        String featureName = "";
+        Integer position = -1;
+        Map<String, String> featureValues = null;
+        Entry<String, Map<String, String>> entry = null;
+        
+        float[] values = new float[numCols];  //  收录一行的值
+        
+        boolean isFirst = true;
+        
+        float sex = -2.0f;
+        float age = -1.0f;
+        
+        /**
+         * 数据方案设计
+         */
+        while (entries.hasNext()) {
+
+            entry = entries.next();
+            featureName = entry.getKey();
+            featureValues = entry.getValue();
+            position = this.FEATURE_DICT.get(featureName);
+//            negative = this.NEGATIVE_DICT.get(featureValues.get("negative"));
+            
+            // 保留性别和年龄
+            if (isFirst) {
+                if (featureValues.get("sex") == "M")
+                    sex = 1.0f;
+                else if (featureValues.get("sex") == "F")
+                    sex = -1.0f;
+                else 
+                    sex = 0.0f;
+                
+                age = Float.parseFloat(featureValues.get("age"));
+                
+                isFirst = false;
+            }
+            
+            // 取值
+            values[0] = 1.0f;
+            
+            if (featureValues.get("partbody") == null || "".equals(featureValues.get("partbody"))
+                    || this.PARTBODY_DICT.get(featureValues.get("partbody")) == null)
+                values[1] = this.PARTBODY_DICT.get("EMPTY").floatValue();
+            else  // 值即是index
+                values[1] = this.PARTBODY_DICT.get(featureValues.get("partbody")).floatValue();
+            
+            if (featureValues.get("property") == null || "".equals(featureValues.get("property"))
+                    || this.PROPERTY_DICT.get(featureValues.get("property")) == null)
+                values[2] = this.PROPERTY_DICT.get("EMPTY").floatValue();
+            else
+                values[2] = this.PROPERTY_DICT.get(featureValues.get("property")).floatValue();  
+            
+            values[3] = this.DURATION_DICT.get(featureValues.get("duration"));
+            
+            if (position != null) {
+                for (int i=0; i<numCols;i++)
+                    featureVector[position][i] = values[i];
+            }
+        }
+        
+        // 添加年龄和性别
+        position = this.FEATURE_DICT.get("性别");
+        featureVector[position][0] = sex;
+        featureVector[position][1] = -1;
+        featureVector[position][2] = -1;
+        featureVector[position][3] = -1;
+        
+        position = this.FEATURE_DICT.get("年龄");
+        featureVector[position][0] = age;
+        featureVector[position][1] = -1;
+        featureVector[position][2] = -1;
+        featureVector[position][3] = -1;
+        
+     // 多维数组转一维数组(tensorflow需要),参看numpy或tensorflow reshape方法
+        int index = 0;
+        for (int i=0; i<this.NUM_FEATURE; i++)
+            for (int j=0; j<numCols; j++) {
+                results[index] = featureVector[i][j];
+                index += 1;
+            }
+                
+        return results;
+    }
+
+
+    /**
+     * 读取字典
+     */
+
+    @Override
+    public void readDict(String modelAndVersion) {
+        
+        PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
+        String model_version = prop.getProperty(modelAndVersion);
+
+        String filePath = prop.getProperty("basicPath");  // 基本目录
+        filePath = filePath.substring(0, filePath.indexOf("model_version_replacement"));
+        
+        filePath = filePath + "dictionaries.bin";  // 字典文件位置
+        
+        List<String> lines = TextFileReader.readLines(filePath);
+
+        boolean firstLine = true;
+        
+        String[] temp = null;
+        for (String line : lines) {
+            if (firstLine) {  // 去除第一行
+                firstLine = false;
+                continue;
+            }
+            
+            temp = line.split("\\|");
+            
+            if(temp[3].equals(model_version)){
+                int type_id = Integer.parseInt(temp[2]);
+                int _index = Integer.parseInt(temp[1]);
+                String _name = temp[0];
+
+                if (type_id == 1)
+                    this.FEATURE_DICT.put(_name, _index);
+                else if (type_id == 2)
+                    this.LABEL_DICT.put(_name, _index);
+                else if (type_id == 8)
+                    this.NEGATIVE_DICT.put(_name, _index);
+                else if (type_id == 11)
+                    this.PARTBODY_DICT.put(_name, _index);
+                else if (type_id == 12)
+                    this.PROPERTY_DICT.put(_name, _index);
+                else if (type_id == 13)
+                    this.DURATION_DICT.put(_name, _index);
+            }
+
+        }
+
+        System.out.println("feature size:" + this.FEATURE_DICT.size());
+
+    }
+
+}

+ 72 - 0
algorithm/src/main/java/org/algorithm/core/neural/neuroph/NeurophLearning.java

@@ -0,0 +1,72 @@
+package org.algorithm.core.neural.neuroph;
+
+import org.neuroph.core.data.DataSet;
+import org.neuroph.core.data.DataSetRow;
+import org.neuroph.core.events.LearningEvent;
+import org.neuroph.core.events.LearningEventListener;
+import org.neuroph.core.learning.LearningRule;
+import org.neuroph.nnet.MultiLayerPerceptron;
+import org.neuroph.nnet.learning.BackPropagation;
+import org.neuroph.util.TransferFunctionType;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Minimal Neuroph multi-layer-perceptron training harness.
+ *
+ * @Auther: fyeman
+ * @Date: 2018/7/31/031 19:06
+ * @Description:
+ */
+public class NeurophLearning implements LearningEventListener {
+
+    // NOTE(review): both lists start empty, so main() builds a 0-input /
+    // 0-output network unless initDataSet is implemented — confirm intended.
+    private List<String> allInputNameList = new ArrayList<>();
+    private List<String> allOutputNameList = new ArrayList<>();
+
+    public static void main(String args[]) {
+        NeurophLearning learning = new NeurophLearning();
+        DataSet trainingDataSet = new DataSet(learning.allInputNameList.size(), learning.allOutputNameList.size());
+        learning.initDataSet(trainingDataSet);
+        learning.train(trainingDataSet);
+    }
+
+    public void train(DataSet trainingDataSet) {
+        // Transfer function: sigmoid (tanh etc. would also work).
+        // Layers: input layer, one hidden layer of 100 units, output layer.
+        MultiLayerPerceptron multiLayerPerceptron = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, allInputNameList.size(), 100, allOutputNameList.size());
+        // Back-propagation of errors.
+        multiLayerPerceptron.setLearningRule(new BackPropagation());
+
+        LearningRule learningRule = multiLayerPerceptron.getLearningRule();
+        learningRule.addListener(this);
+
+        // Train the network on the data set.
+        System.out.println("XOR集训练神经网络...");
+        multiLayerPerceptron.learn(trainingDataSet);
+
+        test(multiLayerPerceptron, trainingDataSet);
+
+    }
+
+    public static void test(MultiLayerPerceptron multiLayerPerceptron, DataSet trainingDataSet) {
+        for(DataSetRow testSetRow : trainingDataSet.getRows()) {
+            multiLayerPerceptron.setInput(testSetRow.getInput());
+            multiLayerPerceptron.calculate();
+            double[] networkOutput = multiLayerPerceptron.getOutput();
+
+            System.out.print("Input: " + Arrays.toString( testSetRow.getInput() ) );
+            System.out.println(" Output: " + Arrays.toString( networkOutput) );
+        }
+    }
+
+    @Override
+    public void handleLearningEvent(LearningEvent event) {
+        BackPropagation bp = (BackPropagation)event.getSource();
+        if (event.getEventType() != LearningEvent.Type.LEARNING_STOPPED)
+            System.out.println(bp.getCurrentIteration() + ". iteration : "+ bp.getTotalNetworkError());
+    }
+
+    // Hook for populating the training data set; intentionally empty here.
+    public void initDataSet(DataSet trainingDataSet) {
+    }
+
+}

+ 89 - 0
algorithm/src/main/java/org/algorithm/factory/AlgorithmFactory.java

@@ -0,0 +1,89 @@
+package org.algorithm.factory;
+
+import org.algorithm.core.AlgorithmExecutor;
+import org.algorithm.core.bayes.AlgorithmBayesExecutor;
+import org.algorithm.core.neural.*;
+import org.algorithm.util.AlgorithmClassify;
+
+/**
+ * Factory returning one cached executor instance per algorithm classify.
+ *
+ * @Auther: fyeman
+ * @Date: 2018/7/19/019 10:27
+ * @Description:
+ */
+public class AlgorithmFactory {
+    private static AlgorithmBayesExecutor algorithmBayesExecutorInstance = null;
+
+    private static AlgorithmNeuralExecutor algorithmNeuralExecutorInstance = null;
+
+    private static DiagnosisPredictExecutor outpatientExecutorInstance = null;
+
+    private static SymptomPredictExecutor symptomPredictExcutorInstance = null;
+
+    private static VitalPredictExecutor vitalPredictExcutorInstance = null;
+
+    private static PacsPredictExecutor pacsPredictExcutorInstance = null;
+
+    private static LisPredictExecutor lisPredictExcutorInstance = null;
+
+    private static DiagnosisToSymptomExecutor diagnosisToSymptomExecutorInstance = null;
+
+    private static DiagnosisToVitalExecutor diagnosisToVitalExecutorInstance = null;
+
+    private static DiagnosisToLisExecutor diagnosisToLisExecutorInstance = null;
+
+    private static DiagnosisToPacsExecutor diagnosisToPacsExecutorInstance = null;
+
+    /**
+     * Returns the lazily created, cached executor for the given classify, or
+     * null for an unhandled classify or on instantiation failure.
+     *
+     * FIX: the method itself is now synchronized. The previous code only
+     * synchronized the construction inside create(), so two racing callers
+     * could both observe a null field and build two instances.
+     */
+    public static synchronized AlgorithmExecutor getInstance(AlgorithmClassify classify) {
+        try {
+            switch (classify) {
+                case NEURAL:
+                    algorithmNeuralExecutorInstance = (AlgorithmNeuralExecutor) create(algorithmNeuralExecutorInstance, AlgorithmNeuralExecutor.class);
+                    return algorithmNeuralExecutorInstance;
+                case BAYES:
+                    algorithmBayesExecutorInstance = (AlgorithmBayesExecutor) create(algorithmBayesExecutorInstance, AlgorithmBayesExecutor.class);
+                    return algorithmBayesExecutorInstance;
+                case NEURAL_DIAG:
+                    outpatientExecutorInstance = (DiagnosisPredictExecutor) create(outpatientExecutorInstance, DiagnosisPredictExecutor.class);
+                    return outpatientExecutorInstance;
+                case NEURAL_SYMPTOM:
+                    symptomPredictExcutorInstance = (SymptomPredictExecutor) create(symptomPredictExcutorInstance, SymptomPredictExecutor.class);
+                    return symptomPredictExcutorInstance;
+                case NEURAL_VITAL:
+                    vitalPredictExcutorInstance = (VitalPredictExecutor) create(vitalPredictExcutorInstance, VitalPredictExecutor.class);
+                    return vitalPredictExcutorInstance;
+                case NEURAL_LIS:
+                    lisPredictExcutorInstance = (LisPredictExecutor) create(lisPredictExcutorInstance, LisPredictExecutor.class);
+                    return lisPredictExcutorInstance;
+                case NEURAL_PACS:
+                    pacsPredictExcutorInstance = (PacsPredictExecutor) create(pacsPredictExcutorInstance, PacsPredictExecutor.class);
+                    return pacsPredictExcutorInstance;
+                case NEURAL_DIAG_SYMPTOM:
+                    diagnosisToSymptomExecutorInstance = (DiagnosisToSymptomExecutor) create(diagnosisToSymptomExecutorInstance, DiagnosisToSymptomExecutor.class);
+                    return diagnosisToSymptomExecutorInstance;
+                case NEURAL_DIAG_VITAL:
+                    diagnosisToVitalExecutorInstance = (DiagnosisToVitalExecutor) create(diagnosisToVitalExecutorInstance, DiagnosisToVitalExecutor.class);
+                    return diagnosisToVitalExecutorInstance;
+                case NEURAL_DIAG_LIS:
+                    diagnosisToLisExecutorInstance = (DiagnosisToLisExecutor) create(diagnosisToLisExecutorInstance, DiagnosisToLisExecutor.class);
+                    return diagnosisToLisExecutorInstance;
+                case NEURAL_DIAG_PACS:
+                    diagnosisToPacsExecutorInstance = (DiagnosisToPacsExecutor) create(diagnosisToPacsExecutorInstance, DiagnosisToPacsExecutor.class);
+                    return diagnosisToPacsExecutorInstance;
+            }
+        } catch (InstantiationException inst) {
+            inst.printStackTrace();
+        } catch (IllegalAccessException ille) {
+            ille.printStackTrace();
+        }
+        return null;
+    }
+
+    /** Creates a new instance only if the cached one is still null. */
+    private static Object create(Object obj, Class<?> cls) throws InstantiationException, IllegalAccessException {
+        if (obj == null) {
+            obj = cls.newInstance();  // guarded by getInstance's lock
+        }
+        return obj;
+    }
+}

+ 54 - 0
algorithm/src/main/java/org/algorithm/test/HelloTF.java

@@ -0,0 +1,54 @@
+package org.algorithm.test;
+
+//Invoke in Java
+
+import org.tensorflow.SavedModelBundle;
+import org.tensorflow.Session;
+import org.tensorflow.Tensor;
+import org.tensorflow.TensorFlow;
+
+import java.nio.FloatBuffer;
+
+/**
+ * Calls a Python-trained TensorFlow model from Java.
+ *
+ * @Author: bijl
+ * @Date: 2018年7月18日-下午1:34:45
+ * @Description:
+ */
+public class HelloTF {
+
+    public static void main(String[] args) {
+        // good idea to print the version number, 1.2.0 as of this writing
+        System.out.println(TensorFlow.version());
+        final int NUM_PREDICTIONS = 1;
+        long[] xShape = {2, 4};
+        
+        // All input values go into one flat buffer; the Tensor is then
+        // sliced according to the shape.
+        float[] xValue = {4.5f,2.3f,1.3f,0.3f,6.6f,2.9f,4.6f,1.3f};
+
+        /* load the model Bundle */
+        SavedModelBundle b = SavedModelBundle.load("/tmp/model", "serve");
+
+        // create the session from the Bundle
+        Session sess = b.session();
+        
+        // create the input Tensor from shape + buffer
+        Tensor x = Tensor.create(
+                xShape ,  // shape
+                FloatBuffer.wrap(xValue)  // value
+        );
+
+        // run the model and fetch the result
+        float[][] y = sess.runner()  // obtain a runner object
+                .feed("x", x)   // feed 'x':x, like a Python feed dict
+                .fetch("softmax")     // tensor to fetch, by name; several may be added
+                .run()          // run; returns a list of tensors
+                .get(0)         // List accessor
+                .copyTo(new float[2][3]);  // Tensor method: copy into a Java array
+
+        // print out the result.
+        for (int i = 0; i<y.length;i++)
+            for (int j=0;j<y[i].length;j++)
+                System.out.println(y[i][j]);
+    }
+}

+ 67 - 0
algorithm/src/main/java/org/algorithm/test/NNDataSetImplNonPatallelTest.java

@@ -0,0 +1,67 @@
+package org.algorithm.test;
+
+import org.algorithm.core.neural.dataset.NNDataSet;
+import org.algorithm.core.neural.dataset.NNDataSetImplNonParallel;
+
+import java.util.HashMap;
+import java.util.Map;
+
+;
+
+/**
+ * Manual test for the non-parallel outpatient data set.
+ *
+ * @Author: bijl
+ * @Date: 2018年7月26日-下午1:58:41
+ * @Description:
+ */
+public class NNDataSetImplNonPatallelTest {
+    
+    public static void main(String[] args) {
+        
+        //TODO:change modelAndVersion to test different models
+        String modelAndVersion = "diagnosisPredict.version";
+        
+        NNDataSet dataSet = new NNDataSetImplNonParallel(modelAndVersion);
+        
+        Map<String, Map<String, String>> aMap = new HashMap<>();
+        
+        Map<String, String> featureValues = new HashMap<>();
+        
+        featureValues.put("partbody", "左肺上叶");
+        featureValues.put("property", "压榨样");
+        featureValues.put("duration", "0");
+        featureValues.put("sex", "M");
+        featureValues.put("age", "34");
+        featureValues.put("negative", "有");
+        featureValues.put("sn", "0");
+        aMap.put("踝关节疼痛", featureValues);
+        
+        float[] featureVector = dataSet.toFeatureVector(aMap);
+        
+//        System.out.println(dataSet.getFEATURE_DICT());
+        float sum = 0;
+        for (int i=0; i<featureVector.length;i++)
+            sum += featureVector[i];
+        
+        System.out.println(sum);
+        
+        int index = 0;
+        
+        // print the non-zero rows (i.e. the meaningful values), 4 columns each
+        while (index < featureVector.length) {
+            if (featureVector[index] != 0.0f) {
+               System.out.println(featureVector[index]);
+               System.out.println(featureVector[index+1]);
+               System.out.println(featureVector[index+2]);
+               System.out.println(featureVector[index+3]);
+               System.out.println("...........................................");
+               
+               index += 4;
+            }
+            else
+                index += 1;
+            
+        }
+    }
+
+}

+ 40 - 0
algorithm/src/main/java/org/algorithm/test/NNDataSetImplTest.java

@@ -0,0 +1,40 @@
+package org.algorithm.test;
+
+import org.algorithm.core.neural.dataset.NNDataSet;
+import org.algorithm.core.neural.dataset.NNDataSetImpl;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Manual test for the outpatient data set.
+ *
+ * @Author: bijl
+ * @Date: 2018年7月26日-下午1:58:41
+ * @Description:
+ */
+public class NNDataSetImplTest {
+    
+    public static void main(String[] args) {
+        
+        //TODO:change modelAndVersion to test different models
+        String modelAndVersion = "diagnosis_to_symptom_1";
+        
+        NNDataSet dataSet = new NNDataSetImpl(modelAndVersion);
+        
+        Map<String, Map<String, String>> aMap = new HashMap<>();
+        
+        Map<String, String> featureValues = new HashMap<>();
+        
+        featureValues.put("partbody", "上皮");
+        featureValues.put("negative", "有");
+        featureValues.put("sn", "0");
+        aMap.put("踝关节疼痛", featureValues);
+        
+        float[] featureVector = dataSet.toFeatureVector(aMap);
+        
+        // dump the whole vector, one "index:value" per line
+        for (int i=0; i<featureVector.length;i++) {
+            System.out.println(i+":"+featureVector[i]);
+        }
+    }
+
+}

+ 96 - 0
algorithm/src/main/java/org/algorithm/test/TensorflowExcutorTest.java

@@ -0,0 +1,96 @@
+package org.algorithm.test;
+
+import org.algorithm.core.neural.SymptomPredictExecutor;
+import org.algorithm.util.Utils;
+
+import java.util.HashMap;
+import java.util.Map;
+
+
+// Manual smoke test for the TensorFlow executors: builds a fictional
+// medical record, runs one executor and prints the top-10 predictions.
+public class TensorflowExcutorTest {
+
+    public static void main(String[] args) {
+        
+        //TODO:change VitalPredictExcutor to test different executors
+//        VitalPredictExecutor excutor = new VitalPredictExecutor();
+        SymptomPredictExecutor excutor = new SymptomPredictExecutor();
+//        LisPredictExecutor excutor = new LisPredictExecutor();
+//        DiagnosisPredictExecutor excutor = new DiagnosisPredictExecutor();
+//        PacsPredictExecutor excutor = new PacsPredictExecutor();
+//        DiagnosisToLisExecutor excutor = new DiagnosisToLisExecutor();
+//        DiagnosisToPacsExecutor excutor = new DiagnosisToPacsExecutor();
+//        DiagnosisToSymptomExecutor excutor = new DiagnosisToSymptomExecutor();
+//        DiagnosisToVitalExecutor excutor = new DiagnosisToVitalExecutor();
+
+        /**
+         * Test medical record (fictional).
+         */
+        Map<String, Map<String, String>> aMap = new HashMap<>();
+        Map<String, String> featureValues = new HashMap<>();
+
+        featureValues.put("negative", "有");
+//        aMap.put("夜间", featureValues);
+//        aMap.put("肢体活动障碍", featureValues);
+//        aMap.put("血压升高", featureValues);
+        
+//        aMap.put("体重下降", featureValues);
+//        aMap.put("恶心", featureValues);
+//        aMap.put("呕吐", featureValues);
+//        aMap.put("反酸", featureValues);
+//        aMap.put("肝脾", featureValues);
+//        aMap.put("胃", featureValues);
+//        aMap.put("腹平软", featureValues);
+//        aMap.put("巩膜", featureValues);
+//        aMap.put("大便无殊", featureValues);
+//        aMap.put("血吸虫病", featureValues);
+//        aMap.put("尿急", featureValues);
+//        aMap.put("泌尿系结石", featureValues);
+//        aMap.put("冠心病", featureValues);
+
+//        aMap.put("呕吐", featureValues);
+//        aMap.put("右上腹痛", featureValues);
+//        aMap.put("反复", featureValues);
+        
+//        aMap.put("红斑", featureValues);
+//        aMap.put("淋巴结", featureValues);
+//        aMap.put("神清", featureValues);
+//
+//        aMap.put("胀痛", featureValues);
+//        aMap.put("精神可", featureValues);
+//        aMap.put("胃纳可", featureValues);
+//        aMap.put("颜面部", featureValues);
+//
+//        aMap.put("头皮", featureValues);
+//        aMap.put("带状疱疹", featureValues);
+//        aMap.put("二便无殊", featureValues);
+//
+//        aMap.put("耳后", featureValues);
+//        aMap.put("持续性", featureValues);
+//        aMap.put("面部疼痛", featureValues);
+//        
+        featureValues.put("partbody", "左肺上叶");
+        featureValues.put("property", "压榨样");
+        featureValues.put("duration", "0");
+        featureValues.put("sex", "M");
+        featureValues.put("age", "34");
+        featureValues.put("negative", "有");
+        featureValues.put("sn", "0");
+        aMap.put("踝关节疼痛", featureValues);
+//        aMap.put("心悸", featureValues);
+//        aMap.put("气急", featureValues);
+//        aMap.put("头痛", featureValues);
+//        aMap.put("头晕", featureValues);
+
+        // run the executor
+        Map<String, Float> result = excutor.execute(aMap);
+
+//        for (Entry<String, Float> entry : result.entrySet()) {
+//            System.out.println(entry.getKey() + " : " + entry.getValue());
+//        }
+//        System.out.println(result);
+        Utils.top_k(10, result);
+
+    }
+    
+   
+}

+ 23 - 0
algorithm/src/main/java/org/algorithm/test/Test.java

@@ -0,0 +1,23 @@
+package org.algorithm.test;
+
+
+public class Test {
+
+    /**
+     * Quick manual check: derives the models base directory from a full
+     * model path by cutting it at the "model_version_replacement"
+     * placeholder. Prints "/opt/models/".
+     */
+    public static void main(String[] args) {
+        String modelPath = "/opt/models/model_version_replacement/model";
+        int cutAt = modelPath.indexOf("model_version_replacement");
+
+        System.out.println(modelPath.substring(0, cutAt));
+    }
+
+}

+ 65 - 0
algorithm/src/main/java/org/algorithm/util/AlgorithmClassify.java

@@ -0,0 +1,65 @@
+package org.algorithm.util;
+
+/**
+ * Algorithm type codes used to select an executor in AlgorithmFactory.
+ *
+ * @Auther: fyeman
+ * @Date: 2018/7/19/019 10:37
+ * @Description:
+ */
+public enum AlgorithmClassify {
+    NEURAL("0"), BAYES("1"), EMERGENCY_NEURAL("2"), OUTPATIENT_NEURAL("3"),
+    NEURAL_SYMPTOM("11"),
+    NEURAL_DIAG("21"),
+    NEURAL_VITAL("31"),
+    NEURAL_LIS("41"),
+    NEURAL_PACS("51"),
+    NEURAL_TREAT("61"),
+    NEURAL_HISTORY("71"),
+    NEURAL_DIAG_SYMPTOM("111"),
+    NEURAL_DIAG_VITAL("131"),
+    NEURAL_DIAG_LIS("141"),
+    NEURAL_DIAG_PACS("151"),
+    NEURAL_DIAG_TREAT("161"),
+    NEURAL_DIAG_HISTORY("171");
+
+    // wire/code value associated with the constant
+    private String value;
+
+    AlgorithmClassify(String value) {
+        this.value = value;
+    }
+
+    public String toString() {
+        return value;
+    }
+
+    /**
+     * Maps a code string back to its enum constant.
+     *
+     * NOTE(review): any unknown code — including "0"/"1"/"2"/"3", which do
+     * have constants but no case here — falls through to the NEURAL_DIAG
+     * default; confirm that fallback is intended.
+     */
+    public static AlgorithmClassify parse(String value) {
+        switch (value) {
+            case "11":
+                return AlgorithmClassify.NEURAL_SYMPTOM;
+            case "21":
+                return AlgorithmClassify.NEURAL_DIAG;
+            case "31":
+                return AlgorithmClassify.NEURAL_VITAL;
+            case "41":
+                return AlgorithmClassify.NEURAL_LIS;
+            case "51":
+                return AlgorithmClassify.NEURAL_PACS;
+            case "61":
+                return AlgorithmClassify.NEURAL_TREAT;
+            case "71":
+                return AlgorithmClassify.NEURAL_HISTORY;
+            case "111":
+                return AlgorithmClassify.NEURAL_DIAG_SYMPTOM;
+            case "131":
+                return AlgorithmClassify.NEURAL_DIAG_VITAL;
+            case "141":
+                return AlgorithmClassify.NEURAL_DIAG_LIS;
+            case "151":
+                return AlgorithmClassify.NEURAL_DIAG_PACS;
+            case "161":
+                return AlgorithmClassify.NEURAL_DIAG_TREAT;
+            case "171":
+                return AlgorithmClassify.NEURAL_DIAG_HISTORY;
+        }
+        return AlgorithmClassify.NEURAL_DIAG;
+    }
+}

+ 103 - 0
algorithm/src/main/java/org/algorithm/util/MysqlConnector.java

@@ -0,0 +1,103 @@
+package org.algorithm.util;
+
+import java.sql.*;
+import java.util.List;
+
+/**
+ * mysql链接类
+ * @Author: bijl
+ * @Date: 2018年7月12日-上午10:02:55
+ * @Description:
+ */
public class MysqlConnector {
    // Single JDBC connection owned by this instance; released via close().
    private Connection conn = null;

    /**
     * Opens a connection to the given JDBC URL, forcing UTF-8 so non-ASCII
     * (Chinese) text round-trips correctly.
     *
     * @param url JDBC URL, e.g. {@code jdbc:mysql://host:3306/db?user=u&password=p}
     * @throws RuntimeException if the driver cannot be loaded or the connection fails
     */
    public MysqlConnector(String url) {
        if (url.indexOf("utf8") == -1)  // force utf8 encoding
            url = url.trim() + "&useUnicode=true&characterEncoding=utf8";
        try {
            Class.forName("com.mysql.jdbc.Driver");
            this.conn = DriverManager.getConnection(url);
        } catch (Exception e) {
            e.printStackTrace();
            throw new RuntimeException("初始化数据库连接错误:");
        }
    }

    /**
     * Executes a single SQL statement.
     * <p>
     * try-with-resources closes the Statement on every path — the previous
     * version leaked it.
     *
     * @param sql statement to execute
     * @throws RuntimeException wrapping any SQLException
     */
    public void execute(String sql) {
        try (Statement stmt = this.conn.createStatement()) {
            stmt.execute(sql); // 执行SQL语句
        } catch (SQLException e) {
            e.printStackTrace();
            throw new RuntimeException("执行错误:" + e.getMessage());
        }
    }

    /**
     * Executes the given SQL statements as one JDBC batch.
     * <p>
     * try-with-resources closes the Statement even when the batch fails —
     * previously it was only closed on success.
     *
     * @param sqls statements to add to the batch
     * @throws RuntimeException wrapping any SQLException
     */
    public void executeBatch(List<String> sqls) {
        try (Statement stmt = this.conn.createStatement()) {
            for (String sql : sqls) {
                stmt.addBatch(sql);
            }
            stmt.executeBatch();
        } catch (SQLException e) {
            e.printStackTrace();
            throw new RuntimeException("执行错误:" + e.getMessage());
        }
    }

    /**
     * Runs a query and returns the open ResultSet.
     * <p>
     * The Statement is deliberately NOT closed here (closing it would close the
     * ResultSet); the caller is responsible for closing the ResultSet and its
     * Statement when done. Returns {@code null} if the query fails — callers
     * must null-check.
     *
     * @param querySql SELECT statement to run
     * @return the result set, or null on SQLException
     */
    public ResultSet query(String querySql) {
        Statement stmt = null;
        ResultSet rs = null;
        try {
            stmt = this.conn.createStatement();
            rs = stmt.executeQuery(querySql);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return rs;
    }

    /*
     * Closes the underlying connection; errors are logged and swallowed.
     */
    public void close() {
        try {
            this.conn.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /**
     * Exposes the raw connection for callers needing direct JDBC access.
     *
     * @return the underlying Connection (still owned by this instance)
     */
    public Connection getConn() {
        return this.conn;
    }

}
+
+

+ 49 - 0
algorithm/src/main/java/org/algorithm/util/TextFileReader.java

@@ -0,0 +1,49 @@
+package org.algorithm.util;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * sql文件阅读器,生成sql语句
+ * 
+ * @Author: bijl
+ * @Date: 2018年7月12日-上午10:00:52
+ * @Description:
+ */
public class TextFileReader {
    /**
     * Reads a UTF-8 text file line by line, trimming each line.
     *
     * @param filePath path of the file to read
     * @return trimmed lines in file order (blank lines become empty strings)
     * @throws RuntimeException if the file does not exist or cannot be read;
     *         the message is "读取文件错误:" followed by the cause message, matching
     *         the original contract (a missing file surfaces as
     *         "读取文件错误:文件不存在")
     */
    public static List<String> readLines(String filePath) {

        List<String> lines = new ArrayList<>();
        try {
            File file = new File(filePath);
            if (!(file.isFile() && file.exists())) {
                throw new RuntimeException("文件不存在"); // force an abort when the file is missing
            }
            // try-with-resources closes both streams on every path — the
            // previous version leaked them whenever readLine() threw.
            try (InputStreamReader read = new InputStreamReader(new FileInputStream(file), "UTF-8");
                 BufferedReader bufferedReader = new BufferedReader(read)) {
                String lineTxt;
                while ((lineTxt = bufferedReader.readLine()) != null) {
                    lines.add(lineTxt.trim());
                }
            }
        } catch (Exception e) {
            // rewrap everything (including the missing-file case above) so
            // callers see a single failure type, as before
            throw new RuntimeException("读取文件错误:" + e.getMessage());
        }

        return lines;
    }

}

+ 36 - 0
algorithm/src/main/java/org/algorithm/util/Utils.java

@@ -0,0 +1,36 @@
+package org.algorithm.util;
+
+import java.util.Map;
+import java.util.Map.Entry;
+
public class Utils {

    /**
     * Brute-force top-k by value: prints the k entries with the largest values
     * as "name:value" lines (descending), removing each printed entry from the
     * map. Stops early when the map runs out of entries.
     * <p>
     * NOTE: mutates {@code aMap} — printed entries are removed.
     *
     * @param k    how many entries to print; must be positive
     * @param aMap name-to-score map to scan (and shrink)
     */
    public static void top_k(int k, Map<String, Float> aMap) {
        assert k > 0;

        for (int i = 0; i < k; i++) {
            // Linear scan for the current maximum.
            String bestName = "";
            float bestRate = -1;
            for (Entry<String, Float> entry : aMap.entrySet()) {
                float rate = entry.getValue();
                if (rate > bestRate) {
                    bestRate = rate;
                    bestName = entry.getKey();
                }
            }

            // Nothing left to report — an empty map yields no scan hits.
            if (aMap.isEmpty()) {
                break;
            }

            System.out.println(bestName + ":" + bestRate);
            aMap.remove(bestName);
        }

    }

}

+ 17 - 0
algorithm/src/main/resources/algorithm.properties

@@ -0,0 +1,17 @@
+################################ model basic url ###################################
+
+basicPath=E:/git/push/algorithm/src/main/models/model_version_replacement/model
+#basicPath=/opt/models/dev/models/model_version_replacement/model
+#basicPath=E:/models/model_version_replacement/model
+
+############################### current model version ################################
+diagnosisPredict.version=outpatient_556_IOE_1
+symptomPredict.version=symptom_predict_IOE_4
+vitalPredict.version=vital_predict_IOE_3
+lisPredict.version=lis_predict_IOE_3
+pacsPredict.version=pacs_predict_IOE_2
+
+diagnosisToSymptom.version=diagnosis_to_symptom_1
+diagnosisToLis.version=diagnosis_to_lis_1
+diagnosisToPacs.version=diagnosis_to_pacs_1
+diagnosisToVital.version=diagnosis_to_vital_1

File diff suppressed because it is too large
+ 174891 - 0
algorithm/src/main/resources/dictionaries.bin


+ 25 - 0
bigdata-web/.gitignore

@@ -0,0 +1,25 @@
+/target/
+!.mvn/wrapper/maven-wrapper.jar
+
+### STS ###
+.apt_generated
+.classpath
+.factorypath
+.project
+.settings
+.springBeans
+.sts4-cache
+
+### IntelliJ IDEA ###
+.idea
+*.iws
+*.iml
+*.ipr
+
+### NetBeans ###
+/nbproject/private/
+/build/
+/nbbuild/
+/dist/
+/nbdist/
+/.nb-gradle/

BIN
bigdata-web/.mvn/wrapper/maven-wrapper.jar


+ 1 - 0
bigdata-web/.mvn/wrapper/maven-wrapper.properties

@@ -0,0 +1 @@
+distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.5.4/apache-maven-3.5.4-bin.zip

+ 286 - 0
bigdata-web/mvnw

@@ -0,0 +1,286 @@
+#!/bin/sh
+# ----------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ----------------------------------------------------------------------------
+
+# ----------------------------------------------------------------------------
+# Maven2 Start Up Batch script
+#
+# Required ENV vars:
+# ------------------
+#   JAVA_HOME - location of a JDK home dir
+#
+# Optional ENV vars
+# -----------------
+#   M2_HOME - location of maven2's installed home dir
+#   MAVEN_OPTS - parameters passed to the Java VM when running Maven
+#     e.g. to debug Maven itself, use
+#       set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
+#   MAVEN_SKIP_RC - flag to disable loading of mavenrc files
+# ----------------------------------------------------------------------------
+
+if [ -z "$MAVEN_SKIP_RC" ] ; then
+
+  if [ -f /etc/mavenrc ] ; then
+    . /etc/mavenrc
+  fi
+
+  if [ -f "$HOME/.mavenrc" ] ; then
+    . "$HOME/.mavenrc"
+  fi
+
+fi
+
+# OS specific support.  $var _must_ be set to either true or false.
+cygwin=false;
+darwin=false;
+mingw=false
+case "`uname`" in
+  CYGWIN*) cygwin=true ;;
+  MINGW*) mingw=true;;
+  Darwin*) darwin=true
+    # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
+    # See https://developer.apple.com/library/mac/qa/qa1170/_index.html
+    if [ -z "$JAVA_HOME" ]; then
+      if [ -x "/usr/libexec/java_home" ]; then
+        export JAVA_HOME="`/usr/libexec/java_home`"
+      else
+        export JAVA_HOME="/Library/Java/Home"
+      fi
+    fi
+    ;;
+esac
+
+if [ -z "$JAVA_HOME" ] ; then
+  if [ -r /etc/gentoo-release ] ; then
+    JAVA_HOME=`java-config --jre-home`
+  fi
+fi
+
+if [ -z "$M2_HOME" ] ; then
+  ## resolve links - $0 may be a link to maven's home
+  PRG="$0"
+
+  # need this for relative symlinks
+  while [ -h "$PRG" ] ; do
+    ls=`ls -ld "$PRG"`
+    link=`expr "$ls" : '.*-> \(.*\)$'`
+    if expr "$link" : '/.*' > /dev/null; then
+      PRG="$link"
+    else
+      PRG="`dirname "$PRG"`/$link"
+    fi
+  done
+
+  saveddir=`pwd`
+
+  M2_HOME=`dirname "$PRG"`/..
+
+  # make it fully qualified
+  M2_HOME=`cd "$M2_HOME" && pwd`
+
+  cd "$saveddir"
+  # echo Using m2 at $M2_HOME
+fi
+
+# For Cygwin, ensure paths are in UNIX format before anything is touched
+if $cygwin ; then
+  [ -n "$M2_HOME" ] &&
+    M2_HOME=`cygpath --unix "$M2_HOME"`
+  [ -n "$JAVA_HOME" ] &&
+    JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
+  [ -n "$CLASSPATH" ] &&
+    CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
+fi
+
+# For Mingw, ensure paths are in UNIX format before anything is touched
+if $mingw ; then
+  [ -n "$M2_HOME" ] &&
+    M2_HOME="`(cd "$M2_HOME"; pwd)`"
+  [ -n "$JAVA_HOME" ] &&
+    JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
+  # TODO classpath?
+fi
+
+if [ -z "$JAVA_HOME" ]; then
+  javaExecutable="`which javac`"
+  if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
+    # readlink(1) is not available as standard on Solaris 10.
+    readLink=`which readlink`
+    if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
+      if $darwin ; then
+        javaHome="`dirname \"$javaExecutable\"`"
+        javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
+      else
+        javaExecutable="`readlink -f \"$javaExecutable\"`"
+      fi
+      javaHome="`dirname \"$javaExecutable\"`"
+      javaHome=`expr "$javaHome" : '\(.*\)/bin'`
+      JAVA_HOME="$javaHome"
+      export JAVA_HOME
+    fi
+  fi
+fi
+
+if [ -z "$JAVACMD" ] ; then
+  if [ -n "$JAVA_HOME"  ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+      # IBM's JDK on AIX uses strange locations for the executables
+      JAVACMD="$JAVA_HOME/jre/sh/java"
+    else
+      JAVACMD="$JAVA_HOME/bin/java"
+    fi
+  else
+    JAVACMD="`which java`"
+  fi
+fi
+
+if [ ! -x "$JAVACMD" ] ; then
+  echo "Error: JAVA_HOME is not defined correctly." >&2
+  echo "  We cannot execute $JAVACMD" >&2
+  exit 1
+fi
+
+if [ -z "$JAVA_HOME" ] ; then
+  echo "Warning: JAVA_HOME environment variable is not set."
+fi
+
+CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
+
+# traverses directory structure from process work directory to filesystem root
+# first directory with .mvn subdirectory is considered project base directory
+find_maven_basedir() {
+
+  if [ -z "$1" ]
+  then
+    echo "Path not specified to find_maven_basedir"
+    return 1
+  fi
+
+  basedir="$1"
+  wdir="$1"
+  while [ "$wdir" != '/' ] ; do
+    if [ -d "$wdir"/.mvn ] ; then
+      basedir=$wdir
+      break
+    fi
+    # workaround for JBEAP-8937 (on Solaris 10/Sparc)
+    if [ -d "${wdir}" ]; then
+      wdir=`cd "$wdir/.."; pwd`
+    fi
+    # end of workaround
+  done
+  echo "${basedir}"
+}
+
+# concatenates all lines of a file
+concat_lines() {
+  if [ -f "$1" ]; then
+    echo "$(tr -s '\n' ' ' < "$1")"
+  fi
+}
+
+BASE_DIR=`find_maven_basedir "$(pwd)"`
+if [ -z "$BASE_DIR" ]; then
+  exit 1;
+fi
+
+##########################################################################################
+# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
+# This allows using the maven wrapper in projects that prohibit checking in binary data.
+##########################################################################################
+if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then
+    if [ "$MVNW_VERBOSE" = true ]; then
+      echo "Found .mvn/wrapper/maven-wrapper.jar"
+    fi
+else
+    if [ "$MVNW_VERBOSE" = true ]; then
+      echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..."
+    fi
+    jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar"
+    while IFS="=" read key value; do
+      case "$key" in (wrapperUrl) jarUrl="$value"; break ;;
+      esac
+    done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties"
+    if [ "$MVNW_VERBOSE" = true ]; then
+      echo "Downloading from: $jarUrl"
+    fi
+    wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar"
+
+    if command -v wget > /dev/null; then
+        if [ "$MVNW_VERBOSE" = true ]; then
+          echo "Found wget ... using wget"
+        fi
+        wget "$jarUrl" -O "$wrapperJarPath"
+    elif command -v curl > /dev/null; then
+        if [ "$MVNW_VERBOSE" = true ]; then
+          echo "Found curl ... using curl"
+        fi
+        curl -o "$wrapperJarPath" "$jarUrl"
+    else
+        if [ "$MVNW_VERBOSE" = true ]; then
+          echo "Falling back to using Java to download"
+        fi
+        javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java"
+        if [ -e "$javaClass" ]; then
+            if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
+                if [ "$MVNW_VERBOSE" = true ]; then
+                  echo " - Compiling MavenWrapperDownloader.java ..."
+                fi
+                # Compiling the Java class
+                ("$JAVA_HOME/bin/javac" "$javaClass")
+            fi
+            if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
+                # Running the downloader
+                if [ "$MVNW_VERBOSE" = true ]; then
+                  echo " - Running MavenWrapperDownloader.java ..."
+                fi
+                ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR")
+            fi
+        fi
+    fi
+fi
+##########################################################################################
+# End of extension
+##########################################################################################
+
+export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
+if [ "$MVNW_VERBOSE" = true ]; then
+  echo $MAVEN_PROJECTBASEDIR
+fi
+MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin; then
+  [ -n "$M2_HOME" ] &&
+    M2_HOME=`cygpath --path --windows "$M2_HOME"`
+  [ -n "$JAVA_HOME" ] &&
+    JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
+  [ -n "$CLASSPATH" ] &&
+    CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
+  [ -n "$MAVEN_PROJECTBASEDIR" ] &&
+    MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
+fi
+
+WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
+
+exec "$JAVACMD" \
+  $MAVEN_OPTS \
+  -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
+  "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
+  ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"

+ 161 - 0
bigdata-web/mvnw.cmd

@@ -0,0 +1,161 @@
+@REM ----------------------------------------------------------------------------
+@REM Licensed to the Apache Software Foundation (ASF) under one
+@REM or more contributor license agreements.  See the NOTICE file
+@REM distributed with this work for additional information
+@REM regarding copyright ownership.  The ASF licenses this file
+@REM to you under the Apache License, Version 2.0 (the
+@REM "License"); you may not use this file except in compliance
+@REM with the License.  You may obtain a copy of the License at
+@REM
+@REM    http://www.apache.org/licenses/LICENSE-2.0
+@REM
+@REM Unless required by applicable law or agreed to in writing,
+@REM software distributed under the License is distributed on an
+@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+@REM KIND, either express or implied.  See the License for the
+@REM specific language governing permissions and limitations
+@REM under the License.
+@REM ----------------------------------------------------------------------------
+
+@REM ----------------------------------------------------------------------------
+@REM Maven2 Start Up Batch script
+@REM
+@REM Required ENV vars:
+@REM JAVA_HOME - location of a JDK home dir
+@REM
+@REM Optional ENV vars
+@REM M2_HOME - location of maven2's installed home dir
+@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
+@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending
+@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
+@REM     e.g. to debug Maven itself, use
+@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
+@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
+@REM ----------------------------------------------------------------------------
+
+@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
+@echo off
+@REM set title of command window
+title %0
+@REM enable echoing my setting MAVEN_BATCH_ECHO to 'on'
+@if "%MAVEN_BATCH_ECHO%" == "on"  echo %MAVEN_BATCH_ECHO%
+
+@REM set %HOME% to equivalent of $HOME
+if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
+
+@REM Execute a user defined script before this one
+if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
+@REM check for pre script, once with legacy .bat ending and once with .cmd ending
+if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
+if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
+:skipRcPre
+
+@setlocal
+
+set ERROR_CODE=0
+
+@REM To isolate internal variables from possible post scripts, we use another setlocal
+@setlocal
+
+@REM ==== START VALIDATION ====
+if not "%JAVA_HOME%" == "" goto OkJHome
+
+echo.
+echo Error: JAVA_HOME not found in your environment. >&2
+echo Please set the JAVA_HOME variable in your environment to match the >&2
+echo location of your Java installation. >&2
+echo.
+goto error
+
+:OkJHome
+if exist "%JAVA_HOME%\bin\java.exe" goto init
+
+echo.
+echo Error: JAVA_HOME is set to an invalid directory. >&2
+echo JAVA_HOME = "%JAVA_HOME%" >&2
+echo Please set the JAVA_HOME variable in your environment to match the >&2
+echo location of your Java installation. >&2
+echo.
+goto error
+
+@REM ==== END VALIDATION ====
+
+:init
+
+@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
+@REM Fallback to current working directory if not found.
+
+set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
+IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
+
+set EXEC_DIR=%CD%
+set WDIR=%EXEC_DIR%
+:findBaseDir
+IF EXIST "%WDIR%"\.mvn goto baseDirFound
+cd ..
+IF "%WDIR%"=="%CD%" goto baseDirNotFound
+set WDIR=%CD%
+goto findBaseDir
+
+:baseDirFound
+set MAVEN_PROJECTBASEDIR=%WDIR%
+cd "%EXEC_DIR%"
+goto endDetectBaseDir
+
+:baseDirNotFound
+set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
+cd "%EXEC_DIR%"
+
+:endDetectBaseDir
+
+IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
+
+@setlocal EnableExtensions EnableDelayedExpansion
+for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
+@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
+
+:endReadAdditionalConfig
+
+SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
+set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
+set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
+
+set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar"
+FOR /F "tokens=1,2 delims==" %%A IN (%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties) DO (
+	IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B 
+)
+
+@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
+@REM This allows using the maven wrapper in projects that prohibit checking in binary data.
+if exist %WRAPPER_JAR% (
+    echo Found %WRAPPER_JAR%
+) else (
+    echo Couldn't find %WRAPPER_JAR%, downloading it ...
+	echo Downloading from: %DOWNLOAD_URL%
+    powershell -Command "(New-Object Net.WebClient).DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"
+    echo Finished downloading %WRAPPER_JAR%
+)
+@REM End of extension
+
+%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
+if ERRORLEVEL 1 goto error
+goto end
+
+:error
+set ERROR_CODE=1
+
+:end
+@endlocal & set ERROR_CODE=%ERROR_CODE%
+
+if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
+@REM check for post script, once with legacy .bat ending and once with .cmd ending
+if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
+if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
+:skipRcPost
+
+@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
+if "%MAVEN_BATCH_PAUSE%" == "on" pause
+
+if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
+
+exit /B %ERROR_CODE%

+ 125 - 0
bigdata-web/pom.xml

@@ -0,0 +1,125 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<groupId>org.springframework.boot</groupId>
+		<artifactId>spring-boot-starter-parent</artifactId>
+		<version>2.1.1.RELEASE</version>
+		<relativePath/> <!-- lookup parent from repository -->
+	</parent>
+	<groupId>org.diagbot</groupId>
+	<artifactId>bigdata-web</artifactId>
+	<version>0.0.1-SNAPSHOT</version>
+	<name>bigdata-web</name>
+	<description>bigdata push for diagbot</description>
+
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+		<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+		<java.version>1.8</java.version>
+		<druid.version>1.0.23</druid.version>
+		<mybatis.version>1.1.1</mybatis.version>
+		<mysql.version>5.1.38</mysql.version>
+		<swagger2.version>2.7.0</swagger2.version>
+	</properties>
+
+	<dependencies>
+		<dependency>
+			<groupId>org.diagbot</groupId>
+			<artifactId>public</artifactId>
+			<version>1.0.0</version>
+		</dependency>
+
+		<dependency>
+			<groupId>org.diagbot</groupId>
+			<artifactId>algorithm</artifactId>
+			<version>1.0.0</version>
+		</dependency>
+
+        <dependency>
+            <groupId>org.diagbot</groupId>
+            <artifactId>nlp</artifactId>
+            <version>1.0.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.diagbot</groupId>
+            <artifactId>common-service</artifactId>
+            <version>1.0.0</version>
+        </dependency>
+
+		<dependency>
+			<groupId>org.springframework.boot</groupId>
+			<artifactId>spring-boot-starter-web</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>org.springframework.boot</groupId>
+			<artifactId>spring-boot-starter-test</artifactId>
+			<scope>test</scope>
+		</dependency>
+
+        <dependency>
+            <groupId>org.mybatis.spring.boot</groupId>
+            <artifactId>mybatis-spring-boot-starter</artifactId>
+            <version>1.3.2</version>
+        </dependency>
+        <!-- 分页插件 -->
+        <dependency>
+            <groupId>com.github.pagehelper</groupId>
+            <artifactId>pagehelper-spring-boot-starter</artifactId>
+            <version>1.2.5</version>
+        </dependency>
+
+		<dependency>
+			<groupId>mysql</groupId>
+			<artifactId>mysql-connector-java</artifactId>
+			<version>${mysql.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>com.alibaba</groupId>
+			<artifactId>druid</artifactId>
+			<version>${druid.version}</version>
+		</dependency>
+
+		<dependency>
+			<groupId>io.springfox</groupId>
+			<artifactId>springfox-swagger2</artifactId>
+			<version>${swagger2.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>io.springfox</groupId>
+			<artifactId>springfox-swagger-ui</artifactId>
+			<version>${swagger2.version}</version>
+		</dependency>
+	</dependencies>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.springframework.boot</groupId>
+				<artifactId>spring-boot-maven-plugin</artifactId>
+			</plugin>
+		</plugins>
+
+		<resources>
+			<resource>
+				<directory>src/main/java</directory>
+				<includes>
+					<include>**/*.properties</include>
+					<include>**/*.xml</include>
+					<include>**/*.json</include>
+				</includes>
+				<filtering>false</filtering>
+			</resource>
+			<resource>
+				<directory>src/main/resources</directory>
+				<includes>
+					<include>**/*.yml</include>
+				</includes>
+			</resource>
+		</resources>
+        <finalName>bigdata-web</finalName>
+	</build>
+</project>

+ 18 - 0
bigdata-web/src/main/java/org/diagbot/BigdataWebApplication.java

@@ -0,0 +1,18 @@
+package org.diagbot;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.web.servlet.ServletComponentScan;
+import org.springframework.context.annotation.ComponentScan;
+
/**
 * Spring Boot entry point for the bigdata-web service.
 * <p>
 * Scans all of {@code org.diagbot} for beans and additionally registers
 * servlet components (listeners/filters) declared under
 * {@code org.diagbot.bigdata.common}.
 */
@SpringBootApplication
@ComponentScan(basePackages = "org.diagbot")
@ServletComponentScan(basePackages = "org.diagbot.bigdata.common")
public class BigdataWebApplication {

	/** Boots the embedded container with this class as the primary source. */
	public static void main(String[] args) {
		SpringApplication.run(BigdataWebApplication.class, args);
	}

}

+ 128 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/common/ApplicationCacheUtil.java

@@ -0,0 +1,128 @@
+package org.diagbot.bigdata.common;
+
+import org.apache.commons.lang3.StringUtils;
+import org.diagbot.bigdata.util.BigDataConstants;
+import org.diagbot.common.dao.model.StandardInfo;
+import org.diagbot.common.service.StandardInfoService;
+import org.diagbot.nlp.util.Constants;
+import org.diagbot.nlp.util.NegativeEnum;
+
+import javax.servlet.ServletContext;
+import java.util.*;
+
/**
 * Populates servlet-context-scoped caches of standard terminology
 * (disease/symptom names, synonyms, category relations) from
 * {@link StandardInfoService}, and builds an in-memory tree of the
 * category relations.
 */
public class ApplicationCacheUtil {

    /**
     * Loads all standard-info caches into the servlet context:
     * disease names, symptom names, synonym mappings, category relations,
     * and a name-to-Node tree view of the category relations.
     *
     * @param servletContext      context receiving the cache attributes
     * @param standardInfoService source of the standard terminology records
     */
    public void putStandardInfoContext(ServletContext servletContext, StandardInfoService standardInfoService) {
        Map<String, Object> map = new HashMap<>(10, 0.8f);
        // dictionary cache of disease names
        map.put("categoryId", NegativeEnum.DISEASE.toString());
        this.putStandardInfoNameContext(servletContext, standardInfoService, map, BigDataConstants.standard_info_diag_cache);
        // symptom name cache (reuses the same criteria map with a new categoryId)
        map.put("categoryId", NegativeEnum.SYMPTOM.toString());
        this.putStandardInfoNameContext(servletContext, standardInfoService, map, BigDataConstants.standard_info_symptom_cache);
        // synonym cache (relation type 2)
        this.putStandardInfoRelationNameContext(servletContext, standardInfoService, BigDataConstants.standard_info_relation_type_2, BigDataConstants.standard_info_synonym_cache);
        // category relations (relation type 3)
        this.putStandardInfoRelationNameContext(servletContext, standardInfoService, BigDataConstants.standard_info_relation_type_3, BigDataConstants.standard_info_type_cache);
        // category relations stored as a tree; only three levels supported for now
        Map<String, String> standardInfoTypeMap = (Map<String, String>) servletContext.getAttribute(BigDataConstants.standard_info_type_cache);

        // Build/merge one Node per distinct name, linking child -> parent.
        // NOTE(review): assumes entry key = child name and entry value = parent
        // name, mirroring the relation cache above — confirm against the data.
        Map<String, Node> nodeMap = new HashMap<>();
        for (Map.Entry<String, String> entry : standardInfoTypeMap.entrySet()) {
            Node parent_node = nodeMap.get(entry.getValue());
            if (parent_node == null) {
                parent_node = new Node();
                parent_node.setName(entry.getValue());
            }
            Node child_node = nodeMap.get(entry.getKey());
            if (child_node == null) {
                child_node = new Node();
                child_node.setName(entry.getKey());
            }
            parent_node.add(parent_node, child_node);
            nodeMap.put(entry.getValue(), parent_node);
            nodeMap.put(entry.getKey(), child_node);
        }
        servletContext.setAttribute(BigDataConstants.standard_info_type_tree_cache, nodeMap);

    }

    /**
     * Caches a name->name identity map of standard-info records matching the
     * given criteria under the given context attribute key.
     *
     * @param servletContext      context receiving the cache
     * @param standardInfoService query service
     * @param map                 selection criteria passed to selectList
     * @param key                 servlet-context attribute name to store under
     */
    public void putStandardInfoNameContext(ServletContext servletContext, StandardInfoService standardInfoService, Map<String, Object> map, String key) {
        Map<String, String> standardInfoMap = new HashMap<>(10, 0.8f);
        List<StandardInfo> list = standardInfoService.selectList(map);
        for (StandardInfo standardInfo : list) {
            standardInfoMap.put(standardInfo.getName(), standardInfo.getName());
        }
        servletContext.setAttribute(key, standardInfoMap);
    }

    /**
     * Caches a name->relationName map for records of the given relation type
     * (entries with an empty relation name are skipped) under the given key.
     *
     * @param servletContext      context receiving the cache
     * @param standardInfoService query service
     * @param relationType        relation type filter; blank means no filter
     * @param key                 servlet-context attribute name to store under
     */
    public void putStandardInfoRelationNameContext(ServletContext servletContext, StandardInfoService standardInfoService, String relationType, String key) {
        Map<String, String> standardInfoSynonymMap = new HashMap<>(10, 0.8f);
        Map<String, Object> relationMap = new HashMap<>(10, 0.3f);
        if (StringUtils.isNotEmpty(relationType)) {
            relationMap.put("relationType", relationType);
        }
        List<StandardInfo> list = standardInfoService.selectNotNullRelationTerm(relationMap);
        for (StandardInfo standardInfo : list) {
            if (StringUtils.isNotEmpty(standardInfo.getRelationName())) {
                standardInfoSynonymMap.put(standardInfo.getName(), standardInfo.getRelationName());
            }
        }
        servletContext.setAttribute(key, standardInfoSynonymMap);
    }

    // Attaches, to each direct child of root, any map entries whose parent name
    // matches that child, recording the added keys in hasAddList.
    // NOTE(review): not called anywhere in this class — possibly dead code.
    private void addChilds(Node root, Map<String, String> standardInfoTypeMap, List<String> hasAddList) {
        for (Node node : root.getChilds()) {
            for (Map.Entry<String, String> entry : standardInfoTypeMap.entrySet()) {
                if (node.getName().equals(entry.getValue())) {
                    node.add(node, entry.getKey());
                    hasAddList.add(entry.getKey());
                }
            }
        }
    }

    /**
     * Mutable tree node used for the category-relation tree: a name, a parent
     * reference, and a set of children. Children are kept in a HashSet with
     * default (identity-based) equality, so two Nodes with the same name are
     * distinct entries.
     */
    public class Node {
        private String name;
        private Node parent;
        private Set<Node> childs = new HashSet<>();

        /** Creates a child Node with the given name under parent; returns parent. */
        public Node add(Node parent, String name) {
            Node node = new Node();
            node.setName(name);
            node.setParent(parent);
            parent.getChilds().add(node);
            return parent;
        }

        /** Links an existing node as a child of parent; returns parent. */
        public Node add(Node parent, Node node) {
            node.setParent(parent);
            parent.getChilds().add(node);
            return parent;
        }

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public Set<Node> getChilds() {
            return childs;
        }

        public void setChilds(Set<Node> childs) {
            this.childs = childs;
        }

        public Node getParent() {
            return parent;
        }

        public void setParent(Node parent) {
            this.parent = parent;
        }
    }
}

+ 97 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/common/InitListener.java

@@ -0,0 +1,97 @@
+package org.diagbot.bigdata.common;
+
+
+import org.diagbot.bigdata.dao.model.FeatureMapping;
+import org.diagbot.bigdata.dao.model.ResultMappingDiag;
+import org.diagbot.bigdata.dao.model.ResultMappingFilter;
+import org.diagbot.bigdata.service.FeatureMappingService;
+import org.diagbot.bigdata.service.ResultMappingDiagService;
+import org.diagbot.bigdata.service.ResultMappingFilterService;
+import org.diagbot.bigdata.util.BigDataConstants;
+import org.diagbot.common.service.StandardInfoService;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import javax.servlet.ServletContextEvent;
+import javax.servlet.ServletContextListener;
+import javax.servlet.annotation.WebListener;
+import java.util.*;
+
+@WebListener
+public class InitListener implements ServletContextListener {
+    @Autowired
+    StandardInfoService standardInfoService;
+    @Autowired
+    FeatureMappingService featureMappingService;
+    @Autowired
+    ResultMappingDiagService resultMappingDiagService;
+    @Autowired
+    ResultMappingFilterService resultMappingFilterService;
+
+    public void contextDestroyed(ServletContextEvent arg0) {
+
+    }
+
+    /**
+     * 开始初始化数据
+     *
+     * @return
+     */
+    public void contextInitialized(ServletContextEvent event) {
+        contextStandardLibraryInitialized(event);
+        contextFeatureMappingInitialized(event);
+        contextResultMappingDiagInitialized(event);
+        contextResultMappingFilterInitialized(event);
+    }
+
+    public void contextStandardLibraryInitialized(ServletContextEvent event) {
+        ApplicationCacheUtil applicationCacheUtil = new ApplicationCacheUtil();
+        applicationCacheUtil.putStandardInfoContext(event.getServletContext(), standardInfoService);
+    }
+
+    public void contextFeatureMappingInitialized(ServletContextEvent event) {
+        List<FeatureMapping> featureMappings = featureMappingService.selectList(new HashMap<>());
+        Map<String, String> mapping = new HashMap<>();
+        for (FeatureMapping featureMapping : featureMappings) {
+            mapping.put(featureMapping.getName(), featureMapping.getNameMapping());
+        }
+        event.getServletContext().setAttribute(BigDataConstants.feature_name_mapping, mapping);
+    }
+
+    public void contextResultMappingDiagInitialized(ServletContextEvent event) {
+        List<ResultMappingDiag> resultMappingDiags = resultMappingDiagService.selectList(new HashMap<>());
+
+        Map<String, String> mapping = new HashMap<>();
+        for (ResultMappingDiag resultMappingDiag : resultMappingDiags) {
+            mapping.put(resultMappingDiag.getDiagName(), resultMappingDiag.getDeptName());
+        }
+        event.getServletContext().setAttribute(BigDataConstants.result_mapping_diag, mapping);
+    }
+
+    public void contextResultMappingFilterInitialized(ServletContextEvent event) {
+        List<ResultMappingFilter> resultMappingFilters = resultMappingFilterService.selectList(new HashMap<>());
+
+        Map<String, Map<String, ResultMappingFilter>> mapping = new HashMap<>();
+        Map<String, ResultMappingFilter> filterMap = null;
+        for (ResultMappingFilter resultMappingFilter : resultMappingFilters) {
+            filterMap = mapping.get(resultMappingFilter.getFeatureType());
+            if (filterMap == null) {
+                filterMap = new HashMap<>();
+            }
+            filterMap.put(resultMappingFilter.getFeatureName(), resultMappingFilter);
+            mapping.put(resultMappingFilter.getFeatureType(), filterMap);
+        }
+        event.getServletContext().setAttribute(BigDataConstants.result_mapping_filter, mapping);
+    }
+
+    private void put(Map<String, List<String>> map, String key, List<String> value, String ele) {
+        if (value == null) {
+            value = new ArrayList<>(Arrays.asList(ele));
+            map.put(key, value);
+        } else {
+            if (!value.contains(ele)) {
+                value.add(ele);
+                map.put(key, value);
+            }
+        }
+    }
+}

+ 47 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/common/RegionInterceptor.java

@@ -0,0 +1,47 @@
+/**   
+* @Company: 杭州朗通信息技术有限公司 
+* @Department: 系统软件部 
+* @Description: 朗通智能辅助诊疗系统 
+* @Address: 浙江省杭州市西湖区西斗门路3号 天堂软件园D-7B 
+*/
+package org.diagbot.bigdata.common;
+
+import org.springframework.web.servlet.HandlerInterceptor;
+import org.springframework.web.servlet.ModelAndView;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+/**
+ * 
+ * ClassName: RegionInterceptor 
+ * Function: API访问跨域问题 
+ * date: 2015年7月8日 下午2:09:35  
+ * 
+ * @author 楼辉荣(Fyeman) 
+ * @version 1.0 
+ * @since JDK 1.7
+ */
+public class RegionInterceptor implements HandlerInterceptor {
+	public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {
+		response.setHeader("Access-Control-Allow-Credentials", "true"); //允许哪些url可以跨域请求到本域
+		response.setHeader("Access-Control-Allow-Origin", "*"); //允许哪些url可以跨域请求到本域
+		response.setHeader("Access-Control-Allow-Methods","POST"); //允许的请求方法,一般是GET,POST,PUT,DELETE,OPTIONS
+		response.setHeader("Access-Control-Allow-Headers","x-requested-with,content-type");
+		return true;
+	}
+
+	public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler, ModelAndView modelAndView) throws Exception {
+		response.setHeader("Access-Control-Allow-Credentials", "true"); //允许哪些url可以跨域请求到本域
+		response.setHeader("Access-Control-Allow-Origin", "*"); //允许哪些url可以跨域请求到本域
+		response.setHeader("Access-Control-Allow-Methods","POST"); //允许的请求方法,一般是GET,POST,PUT,DELETE,OPTIONS
+		response.setHeader("Access-Control-Allow-Headers","x-requested-with,content-type");
+	}
+
+	public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) throws Exception {
+		response.setHeader("Access-Control-Allow-Credentials", "true"); //允许哪些url可以跨域请求到本域
+		response.setHeader("Access-Control-Allow-Origin", "*"); //允许哪些url可以跨域请求到本域
+		response.setHeader("Access-Control-Allow-Methods","POST"); //允许的请求方法,一般是GET,POST,PUT,DELETE,OPTIONS
+		response.setHeader("Access-Control-Allow-Headers","x-requested-with,content-type");
+	}
+}

+ 24 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/config/InterceptorConfig.java

@@ -0,0 +1,24 @@
+package org.diagbot.bigdata.config;
+
+import org.diagbot.bigdata.common.RegionInterceptor;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.web.servlet.config.annotation.InterceptorRegistration;
+import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
+import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
+
+/**
+ * @ClassName org.diagbot.nlp.config.InterceptorConfig
+ * @Description TODO
+ * @Author fyeman
+ * @Date 2019/1/14/014 11:10
+ * @Version 1.0
+ **/
+@Configuration
+public class InterceptorConfig implements WebMvcConfigurer {
+    @Override
+    public void addInterceptors(InterceptorRegistry registry) {
+        //登录拦截的管理器
+        InterceptorRegistration registration = registry.addInterceptor(new RegionInterceptor());     //拦截的对象会进入这个类中进行判断
+        registration.addPathPatterns("/**");                    //所有路径都被拦截
+    }
+}

+ 41 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/config/MybatisConfiguration.java

@@ -0,0 +1,41 @@
+package org.diagbot.bigdata.config;
+
+import com.alibaba.druid.pool.DruidDataSource;
+import org.apache.ibatis.session.SqlSessionFactory;
+import org.mybatis.spring.SqlSessionFactoryBean;
+import org.mybatis.spring.annotation.MapperScan;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
+import org.springframework.core.io.support.ResourcePatternResolver;
+
+import javax.sql.DataSource;
+
+/**
+ *@ClassName org.diagbot.bigdata.config.MybatisConfiguration
+ *@Description Spring 配置
+ *@Author fyeman
+ *@Date 2019/1/11/011 10:07
+ *@Version 1.0
+ **/
+
+@Configuration
+@MapperScan({"org.diagbot.bigdata.dao.mapper", "org.diagbot.common.dao.mapper"})
+public class MybatisConfiguration {
+    @Bean
+    public SqlSessionFactory sqlSessionFactory() throws Exception {
+        SqlSessionFactoryBean sqlSessionFactoryBean = new SqlSessionFactoryBean();
+        sqlSessionFactoryBean.setDataSource(dataSource());
+        // 设置mybatis的主配置文件
+        ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
+        sqlSessionFactoryBean.setMapperLocations(resolver.getResources("classpath*:org/diagbot/*/dao/xml/*.xml"));
+        return sqlSessionFactoryBean.getObject();
+    }
+
+    @Bean
+    @ConfigurationProperties(prefix = "spring.datasource")
+    public DataSource dataSource(){
+        return new DruidDataSource();
+    }
+}

+ 41 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/controller/AlgorithmController.java

@@ -0,0 +1,41 @@
+package org.diagbot.bigdata.controller;
+
+import org.diagbot.bigdata.work.AlgorithmCore;
+import org.diagbot.bigdata.work.ResponseData;
+import org.diagbot.bigdata.work.SearchData;
+import org.diagbot.nlp.util.Constants;
+import org.diagbot.pub.api.Response;
+import org.diagbot.pub.web.BaseController;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestMethod;
+import org.springframework.web.bind.annotation.ResponseBody;
+
+import javax.servlet.http.HttpServletRequest;
+
+@Controller
+@RequestMapping("/algorithm")
+public class AlgorithmController extends BaseController {
+    private String[] negative_words = Constants.negative_words;
+
+    @RequestMapping(value = "/page_neural")
+    @ResponseBody
+    public Response<ResponseData> neuralData(HttpServletRequest request, SearchData searchData) throws Exception {
+        return algorithm(request, searchData);
+    }
+
+    @RequestMapping(value = "/neural", method = RequestMethod.POST)
+    @ResponseBody
+    public Response<ResponseData> bayesPageData(HttpServletRequest request, @RequestBody SearchData searchData) throws Exception {
+        return algorithm(request, searchData);
+    }
+
+    public Response<ResponseData> algorithm(HttpServletRequest request, SearchData searchData) throws Exception {
+        Response<ResponseData> response = new Response();
+        AlgorithmCore core = new AlgorithmCore();
+        ResponseData responseData = core.algorithm(request, searchData);
+        response.setData(responseData);
+        return response;
+    }
+}

+ 22 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/FeatureMappingMapper.java

@@ -0,0 +1,22 @@
+/** 
+* @Company: 杭州朗通信息技术有限公司
+* @Department: 医疗事业部
+* @Description: 互动反馈系统 
+* @Address: 浙江省杭州市余杭区向往街1008号乐富海邦园11幢4楼
+*/
+package org.diagbot.bigdata.dao.mapper;
+
+import org.diagbot.bigdata.dao.model.FeatureMapping;
+import org.diagbot.bigdata.dao.model.wrapper.FeatureMappingWrapper;
+import org.diagbot.pub.orm.EntityMapper;
+
+/**
+* @Title: Feature.java
+* @Package: com.zjlantone.nlp.web.doc.dao.model
+* @Description: 数据库操作接口类 
+* @author: 楼辉荣
+* @date: 2016年8月8日 下午17:16:23
+* @version: V1.0
+*/
+public interface FeatureMappingMapper extends EntityMapper<FeatureMapping, FeatureMappingWrapper, Long> {
+}

+ 22 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingDiagMapper.java

@@ -0,0 +1,22 @@
+/** 
+* @Company: 杭州朗通信息技术有限公司
+* @Department: 医疗事业部
+* @Description: 互动反馈系统 
+* @Address: 浙江省杭州市余杭区向往街1008号乐富海邦园11幢4楼
+*/
+package org.diagbot.bigdata.dao.mapper;
+
+import org.diagbot.bigdata.dao.model.ResultMappingDiag;
+import org.diagbot.bigdata.dao.model.wrapper.ResultMappingDiagWrapper;
+import org.diagbot.pub.orm.EntityMapper;
+
+/**
+* @Title: Feature.java
+* @Package: com.zjlantone.nlp.web.doc.dao.model
+* @Description: 数据库操作接口类 
+* @author: 楼辉荣
+* @date: 2016年8月8日 下午17:16:23
+* @version: V1.0
+*/
+public interface ResultMappingDiagMapper extends EntityMapper<ResultMappingDiag, ResultMappingDiagWrapper, Long> {
+}

+ 22 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingFilterMapper.java

@@ -0,0 +1,22 @@
+/** 
+* @Company: 杭州朗通信息技术有限公司
+* @Department: 医疗事业部
+* @Description: 互动反馈系统 
+* @Address: 浙江省杭州市余杭区向往街1008号乐富海邦园11幢4楼
+*/
+package org.diagbot.bigdata.dao.mapper;
+
+import org.diagbot.bigdata.dao.model.ResultMappingFilter;
+import org.diagbot.bigdata.dao.model.wrapper.ResultMappingFilterWrapper;
+import org.diagbot.pub.orm.EntityMapper;
+
+/**
+* @Title: Feature.java
+* @Package: com.zjlantone.nlp.web.doc.dao.model
+* @Description: 数据库操作接口类 
+* @author: 楼辉荣
+* @date: 2016年8月8日 下午17:16:23
+* @version: V1.0
+*/
+public interface ResultMappingFilterMapper extends EntityMapper<ResultMappingFilter, ResultMappingFilterWrapper, Long> {
+}

+ 38 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/FeatureMapping.java

@@ -0,0 +1,38 @@
+package org.diagbot.bigdata.dao.model;
+
+import java.io.Serializable;
+
+/**
+ * @Auther: fyeman
+ * @Date: 2018/9/10/010 14:30
+ * @Description:
+ */
+public class FeatureMapping implements Serializable {
+    private Long id;
+    private String name;
+    private String nameMapping;
+
+    public Long getId() {
+        return id;
+    }
+
+    public void setId(Long id) {
+        this.id = id;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public String getNameMapping() {
+        return nameMapping;
+    }
+
+    public void setNameMapping(String nameMapping) {
+        this.nameMapping = nameMapping;
+    }
+}

+ 38 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingDiag.java

@@ -0,0 +1,38 @@
+package org.diagbot.bigdata.dao.model;
+
+import java.io.Serializable;
+
+/**
+ * @Auther: fyeman
+ * @Date: 2018/9/12/012 16:50
+ * @Description:
+ */
+public class ResultMappingDiag implements Serializable {
+    private Long id;
+    private String diagName;
+    private String deptName;
+
+    public Long getId() {
+        return id;
+    }
+
+    public void setId(Long id) {
+        this.id = id;
+    }
+
+    public String getDiagName() {
+        return diagName;
+    }
+
+    public void setDiagName(String diagName) {
+        this.diagName = diagName;
+    }
+
+    public String getDeptName() {
+        return deptName;
+    }
+
+    public void setDeptName(String deptName) {
+        this.deptName = deptName;
+    }
+}

+ 68 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingFilter.java

@@ -0,0 +1,68 @@
+package org.diagbot.bigdata.dao.model;
+
+
/**
 * Row object for the push-result filter table: restricts a feature (by type
 * and name) to a sex and an age range.
 *
 * Now implements Serializable for consistency with the sibling model classes
 * (FeatureMapping, ResultMappingDiag); fully qualified to avoid an import change.
 */
public class ResultMappingFilter implements java.io.Serializable {
    // Primary key.
    private Long id;
    // Feature name the filter applies to.
    private String featureName;
    // Feature type code (see BigDataConstants.feature_type_*).
    private String featureType;
    // Sex restriction.
    private String sex;
    // Inclusive lower bound of the age range.
    private int ageStart;
    // Inclusive upper bound of the age range.
    private int ageEnd;
    // Free-text remark.
    private String remark;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getFeatureName() {
        return featureName;
    }

    public void setFeatureName(String featureName) {
        this.featureName = featureName;
    }

    public String getFeatureType() {
        return featureType;
    }

    public void setFeatureType(String featureType) {
        this.featureType = featureType;
    }

    public String getSex() {
        return sex;
    }

    public void setSex(String sex) {
        this.sex = sex;
    }

    public int getAgeStart() {
        return ageStart;
    }

    public void setAgeStart(int ageStart) {
        this.ageStart = ageStart;
    }

    public int getAgeEnd() {
        return ageEnd;
    }

    public void setAgeEnd(int ageEnd) {
        this.ageEnd = ageEnd;
    }

    public String getRemark() {
        return remark;
    }

    public void setRemark(String remark) {
        this.remark = remark;
    }
}

+ 7 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/FeatureMappingWrapper.java

@@ -0,0 +1,7 @@
+package org.diagbot.bigdata.dao.model.wrapper;
+
+
+import org.diagbot.bigdata.dao.model.FeatureMapping;
+
/**
 * Wrapper subtype of {@link FeatureMapping} used as the extended result type
 * in EntityMapper queries (selectWrapperByPrimaryKey, selectListWrapper).
 */
public class FeatureMappingWrapper extends FeatureMapping {
}

+ 12 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingDiagWrapper.java

@@ -0,0 +1,12 @@
+package org.diagbot.bigdata.dao.model.wrapper;
+
+
+import org.diagbot.bigdata.dao.model.ResultMappingDiag;
+
+/**
+ * @Auther: fyeman
+ * @Date: 2018/9/12/012 16:51
+ * @Description:
+ */
+public class ResultMappingDiagWrapper extends ResultMappingDiag {
+}

+ 6 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingFilterWrapper.java

@@ -0,0 +1,6 @@
+package org.diagbot.bigdata.dao.model.wrapper;
+
+import org.diagbot.bigdata.dao.model.ResultMappingFilter;
+
/**
 * Wrapper subtype of {@link ResultMappingFilter} used as the extended result
 * type in EntityMapper queries (selectWrapperByPrimaryKey, selectListWrapper).
 */
public class ResultMappingFilterWrapper extends ResultMappingFilter {
}

+ 77 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/FeatureMappingMapper.xml

@@ -0,0 +1,77 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
        "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="org.diagbot.bigdata.dao.mapper.FeatureMappingMapper">
    <!-- NOTE(review): every statement below reads doc_result_mapping_vital,
         which looks copy-pasted from a vital-sign mapper; confirm that the
         feature mapping data really lives in that table. -->
    <!-- Result map: base entity columns -->
    <resultMap type="org.diagbot.bigdata.dao.model.FeatureMapping" id="featureMappingMap">
        <id property="id" column="id"/>
        <result property="name" column="name"/>
        <result property="nameMapping" column="name_mapping"/>
    </resultMap>

    <!-- Result map: wrapper (extended) entity columns -->
    <resultMap type="org.diagbot.bigdata.dao.model.wrapper.FeatureMappingWrapper" id="featureMappingWrapperMap">
        <id property="id" column="id"/>
        <result property="name" column="name"/>
        <result property="nameMapping" column="name_mapping"/>
    </resultMap>

    <!-- Shared column list for all SELECT statements -->
    <sql id="Base_Column_List">
		 t.id,	 t.name,	 t.name_mapping
	</sql>

    <!-- Select one row by primary key -->
    <select id="selectByPrimaryKey" resultMap="featureMappingMap" parameterType="java.lang.Integer">
        SELECT
        <include refid="Base_Column_List"/>
        FROM doc_result_mapping_vital t
        WHERE t.id = #{id}
    </select>

    <!-- Select one wrapper row by primary key -->
    <select id="selectWrapperByPrimaryKey" resultMap="featureMappingWrapperMap" parameterType="java.lang.Integer">
        SELECT
        <include refid="Base_Column_List"/>
        FROM doc_result_mapping_vital t
        WHERE t.id = #{id}
    </select>

    <!-- Select a list filtered by the optional Map criteria (id/name/nameMapping) -->
    <select id="selectList" resultMap="featureMappingMap" parameterType="java.util.Map">
        SELECT
        <include refid="Base_Column_List"/>
        FROM doc_result_mapping_vital t WHERE 1=1
        <if test="id != null and id != ''">
            and t.id = #{id}
        </if>
        <if test="name != null and name != ''">
            and t.name = #{name}
        </if>
        <if test="nameMapping != null and nameMapping != ''">
            and t.name_mapping = #{nameMapping}
        </if>
    </select>

    <!-- Select a wrapper list filtered by the optional Map criteria -->
    <select id="selectListWrapper" resultMap="featureMappingWrapperMap" parameterType="java.util.Map">
        SELECT
        <include refid="Base_Column_List"/>
        FROM doc_result_mapping_vital t WHERE 1=1
        <if test="id != null and id != ''">
            and t.id = #{id}
        </if>
        <if test="name != null and name != ''">
            and t.name = #{name}
        </if>
        <if test="nameMapping != null and nameMapping != ''">
            and t.name_mapping = #{nameMapping}
        </if>
    </select>

    <!-- Delete one row by primary key -->
    <delete id="deleteByPrimaryKey" parameterType="java.lang.Integer">
		 DELETE FROM doc_result_mapping_vital
		 WHERE id = #{id}
	</delete>
</mapper>

+ 55 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingDiagMapper.xml

@@ -0,0 +1,55 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
        "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="org.diagbot.bigdata.dao.mapper.ResultMappingDiagMapper">
    <!-- Result map: base entity columns -->
    <resultMap type="org.diagbot.bigdata.dao.model.ResultMappingDiag" id="resultMappingDiagMap">
        <id property="id" column="id"/>
        <result property="diagName" column="diag_name"/>
        <result property="deptName" column="dept_name"/>
    </resultMap>

    <!-- Result map: wrapper (extended) entity columns -->
    <resultMap type="org.diagbot.bigdata.dao.model.wrapper.ResultMappingDiagWrapper" id="resultMappingDiagWrapperMap">
        <id property="id" column="id"/>
        <result property="diagName" column="diag_name"/>
        <result property="deptName" column="dept_name"/>
    </resultMap>

    <!-- Shared column list for primary-key SELECTs -->
    <sql id="Base_Column_List">
		 t.id,	 t.diag_name,	 t.dept_name
	</sql>

    <!-- Select one row by primary key -->
    <select id="selectByPrimaryKey" resultMap="resultMappingDiagMap" parameterType="java.lang.Integer">
        SELECT
        <include refid="Base_Column_List"/>
        FROM doc_result_mapping_diag t
        WHERE t.id = #{id}
    </select>

    <!-- Select one wrapper row by primary key -->
    <select id="selectWrapperByPrimaryKey" resultMap="resultMappingDiagWrapperMap" parameterType="java.lang.Integer">
        SELECT
        <include refid="Base_Column_List"/>
        FROM doc_result_mapping_diag t
        WHERE t.id = #{id}
    </select>

    <!-- Select all rows, one per diagnosis, with department names comma-joined
         via group_concat. NOTE: the Map parameter is accepted but not used as
         a filter, and the id column is not selected. -->
    <select id="selectList" resultMap="resultMappingDiagMap" parameterType="java.util.Map">
        select diag_name, group_concat(dept_name) dept_name from doc_result_mapping_diag  group by diag_name
    </select>

    <!-- Wrapper variant of selectList; same aggregation, Map parameter unused -->
    <select id="selectListWrapper" resultMap="resultMappingDiagWrapperMap" parameterType="java.util.Map">
        select diag_name, group_concat(dept_name) dept_name from doc_result_mapping_diag  group by diag_name
    </select>

    <!-- Delete one row by primary key -->
    <delete id="deleteByPrimaryKey" parameterType="java.lang.Integer">
		 DELETE FROM doc_result_mapping_diag
		 WHERE id = #{id}
	</delete>
</mapper>

+ 67 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingFilterMapper.xml

@@ -0,0 +1,67 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
        "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="org.diagbot.bigdata.dao.mapper.ResultMappingFilterMapper">
    <!-- Result map: base entity columns -->
    <resultMap type="org.diagbot.bigdata.dao.model.ResultMappingFilter" id="resultMappingFilterMap">
        <id property="id" column="id"/>
        <result property="featureName" column="feature_name"/>
        <result property="featureType" column="feature_type"/>
        <result property="sex" column="sex"/>
        <result property="ageStart" column="age_start"/>
        <result property="ageEnd" column="age_end"/>
        <result property="remark" column="remark"/>
    </resultMap>

    <!-- Result map: wrapper (extended) entity columns -->
    <resultMap type="org.diagbot.bigdata.dao.model.wrapper.ResultMappingFilterWrapper" id="resultMappingFilterWrapperMap">
        <id property="id" column="id"/>
        <result property="featureName" column="feature_name"/>
        <result property="featureType" column="feature_type"/>
        <result property="sex" column="sex"/>
        <result property="ageStart" column="age_start"/>
        <result property="ageEnd" column="age_end"/>
        <result property="remark" column="remark"/>
    </resultMap>

    <!-- Shared column list for all SELECT statements -->
    <sql id="Base_Column_List">
		 t.id, t.feature_name, t.feature_type, t.sex, t.age_start, t.age_end, t.remark
	</sql>

    <!-- Select one row by primary key -->
    <select id="selectByPrimaryKey" resultMap="resultMappingFilterMap" parameterType="java.lang.Integer">
        SELECT
        <include refid="Base_Column_List"/>
        FROM doc_result_mapping_filter t
        WHERE t.id = #{id}
    </select>

    <!-- Select one wrapper row by primary key -->
    <select id="selectWrapperByPrimaryKey" resultMap="resultMappingFilterWrapperMap" parameterType="java.lang.Integer">
        SELECT
        <include refid="Base_Column_List"/>
        FROM doc_result_mapping_filter t
        WHERE t.id = #{id}
    </select>

    <!-- Select all rows. NOTE: the Map parameter is accepted but no filter
         conditions are applied (WHERE 1=1 only). -->
    <select id="selectList" resultMap="resultMappingFilterMap" parameterType="java.util.Map">
        SELECT
        <include refid="Base_Column_List"/>
        FROM doc_result_mapping_filter t WHERE 1=1
    </select>

    <!-- Wrapper variant of selectList; Map parameter likewise unused -->
    <select id="selectListWrapper" resultMap="resultMappingFilterWrapperMap" parameterType="java.util.Map">
        SELECT
        <include refid="Base_Column_List"/>
        FROM doc_result_mapping_filter t WHERE 1=1
    </select>

    <!-- Delete one row by primary key -->
    <delete id="deleteByPrimaryKey" parameterType="java.lang.Integer">
		 DELETE FROM doc_result_mapping_filter
		 WHERE id = #{id}
	</delete>
</mapper>

+ 8 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/service/FeatureMappingService.java

@@ -0,0 +1,8 @@
+package org.diagbot.bigdata.service;
+
+import org.diagbot.bigdata.dao.model.FeatureMapping;
+import org.diagbot.bigdata.dao.model.wrapper.FeatureMappingWrapper;
+import org.diagbot.pub.service.BaseService;
+
/**
 * Service-layer facade over {@link FeatureMapping} persistence; standard CRUD
 * operations are inherited from BaseService.
 */
public interface FeatureMappingService extends BaseService<FeatureMapping, FeatureMappingWrapper, Long> {
}

+ 8 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingDiagService.java

@@ -0,0 +1,8 @@
+package org.diagbot.bigdata.service;
+
+import org.diagbot.bigdata.dao.model.ResultMappingDiag;
+import org.diagbot.bigdata.dao.model.wrapper.ResultMappingDiagWrapper;
+import org.diagbot.pub.service.BaseService;
+
/**
 * Service-layer facade over {@link ResultMappingDiag} persistence; standard
 * CRUD operations are inherited from BaseService.
 */
public interface ResultMappingDiagService extends BaseService<ResultMappingDiag, ResultMappingDiagWrapper, Long> {
}

+ 8 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingFilterService.java

@@ -0,0 +1,8 @@
+package org.diagbot.bigdata.service;
+
+import org.diagbot.bigdata.dao.model.ResultMappingFilter;
+import org.diagbot.bigdata.dao.model.wrapper.ResultMappingFilterWrapper;
+import org.diagbot.pub.service.BaseService;
+
/**
 * Service-layer facade over {@link ResultMappingFilter} persistence; standard
 * CRUD operations are inherited from BaseService.
 */
public interface ResultMappingFilterService extends BaseService<ResultMappingFilter, ResultMappingFilterWrapper, Long> {
}

+ 21 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/FeatureMappingServiceImpl.java

@@ -0,0 +1,21 @@
+package org.diagbot.bigdata.service.impl;
+
+import org.diagbot.bigdata.dao.mapper.FeatureMappingMapper;
+import org.diagbot.bigdata.dao.model.FeatureMapping;
+import org.diagbot.bigdata.dao.model.wrapper.FeatureMappingWrapper;
+import org.diagbot.bigdata.service.FeatureMappingService;
+import org.diagbot.pub.service.BaseServiceImpl;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
@Service
public class FeatureMappingServiceImpl extends BaseServiceImpl<FeatureMapping, FeatureMappingWrapper, Long> implements FeatureMappingService {
    @Autowired
    FeatureMappingMapper featureMappingMapper;

    // Injection hook: wires the concrete mapper into the generic base service.
    // NOTE(review): @Autowired on a no-argument private method is unconventional
    // (Spring invokes it after field injection, with a warning) — a parameterized
    // setter or constructor injection would be clearer; confirm before changing.
    @Autowired
    private void setEntityMapper() {
        super.setEntityMapper(featureMappingMapper);
    }

}

+ 21 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingDiagServiceImpl.java

@@ -0,0 +1,21 @@
+package org.diagbot.bigdata.service.impl;
+
+import org.diagbot.bigdata.dao.mapper.ResultMappingDiagMapper;
+import org.diagbot.bigdata.dao.model.ResultMappingDiag;
+import org.diagbot.bigdata.dao.model.wrapper.ResultMappingDiagWrapper;
+import org.diagbot.bigdata.service.ResultMappingDiagService;
+import org.diagbot.pub.service.BaseServiceImpl;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
@Service
public class ResultMappingDiagServiceImpl extends BaseServiceImpl<ResultMappingDiag, ResultMappingDiagWrapper, Long> implements ResultMappingDiagService {
    @Autowired
    ResultMappingDiagMapper resultMappingDiagMapper;

    // Injection hook: wires the concrete mapper into the generic base service.
    // NOTE(review): @Autowired on a no-argument private method is unconventional
    // (Spring invokes it after field injection, with a warning) — a parameterized
    // setter or constructor injection would be clearer; confirm before changing.
    @Autowired
    private void setEntityMapper() {
        super.setEntityMapper(resultMappingDiagMapper);
    }

}

+ 21 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingFilterServiceImpl.java

@@ -0,0 +1,21 @@
+package org.diagbot.bigdata.service.impl;
+
+import org.diagbot.bigdata.dao.mapper.ResultMappingFilterMapper;
+import org.diagbot.bigdata.dao.model.ResultMappingFilter;
+import org.diagbot.bigdata.dao.model.wrapper.ResultMappingFilterWrapper;
+import org.diagbot.bigdata.service.ResultMappingFilterService;
+import org.diagbot.pub.service.BaseServiceImpl;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
@Service
public class ResultMappingFilterServiceImpl extends BaseServiceImpl<ResultMappingFilter, ResultMappingFilterWrapper, Long> implements ResultMappingFilterService {
    @Autowired
    ResultMappingFilterMapper resultMappingFilterMapper;

    // Injection hook: wires the concrete mapper into the generic base service.
    // NOTE(review): @Autowired on a no-argument private method is unconventional
    // (Spring invokes it after field injection, with a warning) — a parameterized
    // setter or constructor injection would be clearer; confirm before changing.
    @Autowired
    private void setEntityMapper() {
        super.setEntityMapper(resultMappingFilterMapper);
    }

}

+ 38 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/util/BigDataConstants.java

@@ -0,0 +1,38 @@
+package org.diagbot.bigdata.util;
+
+/**
+ * @ClassName org.diagbot.bigdata.util.BigDataConstants
+ * @Description TODO
+ * @Author fyeman
+ * @Date 2019/1/16/016 14:06
+ * @Version 1.0
+ **/
+public class BigDataConstants {
+    public final static String resource_type_i = "I";       //住院
+    public final static String resource_type_o = "O";       //门诊
+    public final static String resource_type_e = "E";       //急诊
+    public final static String resource_type_oe = "OE";     //急诊 门诊合并
+
+    public final static String feature_type_symptom  = "1"; //症状
+    public final static String feature_type_diag  = "2";    //诊断
+    public final static String feature_type_vital  = "3";   //体征
+    public final static String feature_type_lis = "4";      //化验
+    public final static String feature_type_pacs = "5";     //检查
+    public final static String feature_type_treat = "6";       //治疗
+    public final static String feature_type_history = "7";       //历史
+    public final static String feature_type_feature = "9"; //症状描述中的特征信息 如部位、性质等
+
+    public final static String standard_info_diag_cache = "standardInfoDiagMap";                    //词典库疾病缓存
+    public final static String standard_info_symptom_cache = "standardInfoSymptomMap";             //症状信息缓存
+    public final static String standard_info_synonym_cache = "standardInfoSynonymMap";             //同义词缓存
+    public final static String standard_info_type_cache = "standardInfoTypeMap";             //大小类关系词缓存
+    public final static String standard_info_type_tree_cache = "standardInfoTypeTreeMap";             //大小类关系词缓存-树形结构
+
+    //词典库relation_id定义
+    public final static String standard_info_relation_type_2 = "2";    //同义词
+    public final static String standard_info_relation_type_3 = "3";    //大小类
+
+    public final static String feature_name_mapping = "featureNameMappingMap";          //推送体征结果名称映射
+    public final static String result_mapping_diag = "resultMappingDiagMap";          //推送疾病科室名称映射
+    public final static String result_mapping_filter = "resultMappingFilterMap";          //推送结果年龄 性别过滤
+}

+ 88 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/work/AlgorithmCore.java

@@ -0,0 +1,88 @@
+package org.diagbot.bigdata.work;
+
+import org.algorithm.core.AlgorithmExecutor;
+import org.algorithm.factory.AlgorithmFactory;
+import org.algorithm.util.AlgorithmClassify;
+import org.diagbot.nlp.feature.FeatureType;
+
+import javax.servlet.http.HttpServletRequest;
+import java.util.*;
+
+/**
+ * @ClassName org.diagbot.bigdata.work.AlgorithmCore
+ * @Description TODO
+ * @Author fyeman
+ * @Date 2019/1/16/016 17:01
+ * @Version 1.0
+ **/
+public class AlgorithmCore {
+    public ResponseData algorithm(HttpServletRequest request, SearchData searchData) throws Exception {
+        ResponseData responseData = new ResponseData();
+        //录入文本处理,包括提取特征、推送类型转换等
+        ParamsDataProxy paramsDataProxy = new ParamsDataProxy();
+        paramsDataProxy.createSearchData(request, searchData);
+        responseData.setInputs(searchData.getInputs());
+        //推送模型类型集合
+        AlgorithmClassify[] classifies = searchData.getAlgorithmClassify();
+        String[] featureTypes = searchData.getFeatureTypes();
+        //推送结果处理
+        ResultDataProxy resultDataProxy = new ResultDataProxy();
+        for (int i = 0; i < classifies.length; i++) {
+            if (classifies[i] == null) {
+                continue;
+            }
+            //算法推理
+            AlgorithmExecutor executor = AlgorithmFactory.getInstance(classifies[i]);
+            Map<String, Float> featuresMap = null;
+            if (executor != null) {
+                featuresMap = executor.execute(searchData.getInputs());;
+            }
+            List<Map.Entry<String, Float>> featuresOrderList = null;
+            if (featuresMap == null) {
+                featuresOrderList = new ArrayList<Map.Entry<String, Float>>();
+            } else {
+                //同义词转化
+                featuresMap = resultDataProxy.synonymConvert(request, featuresMap);
+                //大小类合并
+                featuresMap = resultDataProxy.resultMerge(request, featuresMap);
+                //按模型计算的概率排序
+                featuresOrderList = new ArrayList<Map.Entry<String, Float>>(featuresMap.entrySet());
+                Collections.sort(featuresOrderList, new Comparator<Map.Entry<String, Float>>() {
+                    public int compare(Map.Entry<String, Float> o1, Map.Entry<String, Float> o2) {
+                        if (o2.getValue() - o1.getValue() > 0) {
+                            return 1;
+                        } else if (o2.getValue() - o1.getValue() < 0) {
+                            return -1;
+                        } else {
+                            return 0;
+                        }
+                    }
+                });
+            }
+            List<FeatureRate> featureRates = resultDataProxy.proxy(request, searchData, featuresOrderList, featureTypes[i]);
+
+
+            switch (FeatureType.parse(featureTypes[i])) {
+                case SYMPTOM:
+                    responseData.setSymptom(featureRates);
+                    break;
+                case DIAG:
+                    responseData.setDis(featureRates);
+                    break;
+                case VITAL:
+                    responseData.setVitals(featureRates);
+                    break;
+                case LIS:
+                    responseData.setLabs(featureRates);
+                    break;
+                case PACS:
+                    responseData.setPacs(featureRates);
+                    break;
+                case HISTORY:
+                    responseData.setHistory(featureRates);
+                    break;
+            }
+        }
+        return responseData;
+    }
+}

+ 43 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/work/FeatureRate.java

@@ -0,0 +1,43 @@
+package org.diagbot.bigdata.work;
+
+/**
+ * Created by fyeman on 2018/1/17.
+ */
+public class FeatureRate {
+    // Standard (or mapped) display name of the feature.
+    private String featureName;
+    // Extra payload attached to some results, e.g. department mapping for diseases.
+    private String extraProperty;
+    // Free-text description; not populated in this chunk.
+    private String desc;
+    // Model probability, pre-formatted as a string (see ResultDataProxy's DecimalFormat).
+    private String rate;
+
+    public String getFeatureName() {
+        return featureName;
+    }
+
+    public void setFeatureName(String featureName) {
+        this.featureName = featureName;
+    }
+
+    public String getRate() {
+        return rate;
+    }
+
+    public void setRate(String rate) {
+        this.rate = rate;
+    }
+
+    public String getExtraProperty() {
+        return extraProperty;
+    }
+
+    public void setExtraProperty(String extraProperty) {
+        this.extraProperty = extraProperty;
+    }
+
+    public String getDesc() {
+        return desc;
+    }
+
+    public void setDesc(String desc) {
+        this.desc = desc;
+    }
+}

+ 180 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/work/ParamsDataProxy.java

@@ -0,0 +1,180 @@
+package org.diagbot.bigdata.work;
+
+import org.algorithm.util.AlgorithmClassify;
+import org.apache.commons.beanutils.BeanUtils;
+import org.diagbot.bigdata.util.BigDataConstants;
+import org.diagbot.nlp.feature.FeatureAnalyze;
+import org.diagbot.nlp.feature.FeatureType;
+import org.springframework.util.StringUtils;
+
+import javax.servlet.http.HttpServletRequest;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @ClassName org.diagbot.bigdata.work.ParamsDataProxy
+ * @Description TODO
+ * @Author fyeman
+ * @Date 2019/1/16/016 14:04
+ * @Version 1.0
+ **/
+public class ParamsDataProxy {
+    /**
+     * Normalizes the incoming request into a SearchData ready for model execution:
+     * trims free text, derives the age window, applies defaults, resolves the model
+     * classification per feature type, and extracts feature terms from the symptom text.
+     */
+    public void createSearchData(HttpServletRequest request, SearchData searchData) throws Exception {
+        // Trim whitespace from free-text inputs.
+        if (searchData.getSymptom() != null) {
+            searchData.setSymptom(searchData.getSymptom().trim());
+        }
+        if (searchData.getDiag() != null) {
+            searchData.setDiag(searchData.getDiag().trim());
+        }
+        // Derive a +/- 5 year search window around the supplied age.
+        if (searchData.getAge() > 0) {
+            searchData.setAge_start(searchData.getAge() - 5);
+            searchData.setAge_end(searchData.getAge() + 5);
+        }
+        // Default to outpatient data when no resource type is given.
+        if (StringUtils.isEmpty(searchData.getResourceType())) {
+            searchData.setResourceType(BigDataConstants.resource_type_o);
+        }
+        // One request may push several categories at once (comma separated).
+        String[] featureTypes = searchData.getFeatureType().split(",");
+        searchData.setFeatureTypes(featureTypes);
+        // Map each featureType to the algorithm model to run.
+        searchData.setAlgorithmClassify(createAlgorithmClassify(searchData.getSysCode(), featureTypes, searchData.getDiag()));
+        // Extract feature terms from the symptom text into the model inputs.
+        FeatureAnalyze fa = new FeatureAnalyze();
+
+        if (!StringUtils.isEmpty(searchData.getSymptom())) {
+            List<Map<String, Object>> featuresList = fa.start(searchData.getSymptom(), FeatureType.FEATURE);
+            paramFeatureInit(searchData, featuresList, "symptomFeatureList", FeatureType.FEATURE);
+        }
+    }
+
+    /**
+     * Maps each featureType to the model classification to execute. When a diagnosis
+     * is already present, the diagnosis-conditioned model variant is selected instead.
+     * NOTE: also rewrites featureTypes entries in place via convertFeatureType.
+     *
+     * @param sysCode      external system code ("1"/empty means internal coding)
+     * @param featureTypes requested feature categories; normalized in place
+     * @param diag         diagnosis text, may be empty
+     * @return model classification per feature type; null entries mean "skip"
+     */
+    private AlgorithmClassify[] createAlgorithmClassify(String sysCode, String[] featureTypes, String diag) {
+        AlgorithmClassify[] classifies = new AlgorithmClassify[featureTypes.length];
+        for (int i = 0; i < featureTypes.length; i++) {
+            featureTypes[i] = convertFeatureType(sysCode, featureTypes[i]);
+            if (featureTypes[i] != null) {
+                switch (FeatureType.parse(featureTypes[i])) {
+                    case SYMPTOM:
+                        if (StringUtils.isEmpty(diag)) {
+                            classifies[i] = AlgorithmClassify.NEURAL_SYMPTOM;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_SYMPTOM;
+                        }
+                        break;
+                    case DIAG:
+                        // Diagnosis prediction is skipped when a diagnosis is already given.
+                        if (StringUtils.isEmpty(diag)) {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG;
+                        }  else {
+                            classifies[i] = null;
+                        }
+                        break;
+                    case VITAL:
+                        if (StringUtils.isEmpty(diag)) {
+                            classifies[i] = AlgorithmClassify.NEURAL_VITAL;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_VITAL;
+                        }
+                        break;
+                    case LIS:
+                        if (StringUtils.isEmpty(diag)) {
+                            classifies[i] = AlgorithmClassify.NEURAL_LIS;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_LIS;
+                        }
+                        break;
+                    case PACS:
+                        if (StringUtils.isEmpty(diag)) {
+                            classifies[i] = AlgorithmClassify.NEURAL_PACS;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_PACS;
+                        }
+                        break;
+                    case TREAT:
+                        if (StringUtils.isEmpty(diag)) {
+                            classifies[i] = AlgorithmClassify.NEURAL_TREAT;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_TREAT;
+                        }
+                        break;
+                    case HISTORY:
+                        if (StringUtils.isEmpty(diag)) {
+                            classifies[i] = AlgorithmClassify.NEURAL_HISTORY;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_HISTORY;
+                        }
+                        break;
+                }
+            }
+        }
+        return classifies;
+    }
+
+    /**
+     * Converts an external system's featureType code into the big-data featureType.
+     *
+     * @param sysCode     external system code; null/empty or "1" uses the mapping below
+     * @param featureType the external feature type code
+     * @return the internal feature type, the input unchanged for other sysCodes,
+     *         or null when the code is unknown
+     */
+    private String convertFeatureType(String sysCode, String featureType) {
+        if (StringUtils.isEmpty(sysCode) || sysCode.equals("1")) {
+            if ("1".equals(featureType)) {
+                return BigDataConstants.feature_type_symptom;
+            }
+            if ("7".equals(featureType)) {
+                return BigDataConstants.feature_type_diag;
+            }
+            if ("4".equals(featureType)) {
+                return BigDataConstants.feature_type_vital;
+            }
+            if ("5".equals(featureType)) {
+                return BigDataConstants.feature_type_lis;
+            }
+            if ("6".equals(featureType)) {
+                return BigDataConstants.feature_type_pacs;
+            }
+            if ("3".equals(featureType)) {
+                return BigDataConstants.feature_type_history;
+            }
+            if ("8".equals(featureType)) {
+                return BigDataConstants.feature_type_treat;
+            }
+            return null;
+        }
+        return featureType;
+    }
+
+    /**
+     * Feeds the extracted features into the model input map on searchData.
+     *
+     * @param searchData    target search data; inputs map is keyed by feature name
+     * @param featuresList  features extracted by FeatureAnalyze
+     * @param property_list bean property on searchData receiving the raw list
+     * @param featureType   category of the extracted features
+     * @throws Exception from BeanUtils property access
+     */
+    private void paramFeatureInit(SearchData searchData, List<Map<String, Object>> featuresList, String property_list, FeatureType featureType) throws Exception {
+        if (featuresList != null && featuresList.size() > 0) {
+            BeanUtils.setProperty(searchData, property_list, featuresList);
+            Map<String, Object> featureMap = null;
+            for (int i = 0; i < featuresList.size(); i++) {
+                featureMap = featuresList.get(i);
+                // BUGFIX: allocate a fresh map per feature. The original created a
+                // single map before the loop, so every inputs entry shared (and kept
+                // overwriting) the same map instance.
+                Map<String, String> map = new HashMap<>();
+                for (Map.Entry<String, Object> entry : featureMap.entrySet()) {
+                    map.put(entry.getKey(), String.valueOf(entry.getValue()));
+                }
+                map.put("featureType", String.valueOf(featureMap.get("feature_type")));
+                map.put("featureName", String.valueOf(featureMap.get("feature_name")));
+                searchData.getInputs().put(map.get("feature_name"), map);
+            }
+        }
+    }
+}

+ 96 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/work/ResponseData.java

@@ -0,0 +1,96 @@
+package org.diagbot.bigdata.work;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Created by fyeman on 2018/2/2.
+ */
+public class ResponseData {
+    // Word-segmented symptom text; not populated anywhere in this chunk.
+    private String participleSymptom = "";
+
+    // Ranked push results, one list per feature category.
+    private List<FeatureRate> symptom = new ArrayList<>(10);
+    private List<FeatureRate> vitals = new ArrayList<>(10);
+    private List<FeatureRate> dis = new ArrayList<>(10);
+    private List<FeatureRate> labs = new ArrayList<>(10);
+    private List<FeatureRate> pacs = new ArrayList<>(10);
+    private List<FeatureRate> history = new ArrayList<>(10);
+
+    // Treatment suggestions; key/value semantics not visible in this chunk.
+    private Map<String, String> treat = new HashMap<>();
+
+    // Echo of the model inputs: feature name -> feature attribute map.
+    private Map<String, Map<String, String>> inputs = new HashMap<>(10,0.5f);
+
+    public String getParticipleSymptom() {
+        return participleSymptom;
+    }
+
+    public void setParticipleSymptom(String participleSymptom) {
+        this.participleSymptom = participleSymptom;
+    }
+
+    public List<FeatureRate> getSymptom() {
+        return symptom;
+    }
+
+    public void setSymptom(List<FeatureRate> symptom) {
+        this.symptom = symptom;
+    }
+
+    public List<FeatureRate> getVitals() {
+        return vitals;
+    }
+
+    public void setVitals(List<FeatureRate> vitals) {
+        this.vitals = vitals;
+    }
+
+    public List<FeatureRate> getDis() {
+        return dis;
+    }
+
+    public void setDis(List<FeatureRate> dis) {
+        this.dis = dis;
+    }
+
+    public List<FeatureRate> getLabs() {
+        return labs;
+    }
+
+    public void setLabs(List<FeatureRate> labs) {
+        this.labs = labs;
+    }
+
+    public List<FeatureRate> getPacs() {
+        return pacs;
+    }
+
+    public void setPacs(List<FeatureRate> pacs) {
+        this.pacs = pacs;
+    }
+
+    public Map<String, Map<String, String>> getInputs() {
+        return inputs;
+    }
+
+    public void setInputs(Map<String, Map<String, String>> inputs) {
+        this.inputs = inputs;
+    }
+
+    public Map<String, String> getTreat() {
+        return treat;
+    }
+
+    public void setTreat(Map<String, String> treat) {
+        this.treat = treat;
+    }
+
+    public List<FeatureRate> getHistory() {
+        return history;
+    }
+
+    public void setHistory(List<FeatureRate> history) {
+        this.history = history;
+    }
+}

+ 194 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/work/ResultDataProxy.java

@@ -0,0 +1,194 @@
+package org.diagbot.bigdata.work;
+
+import org.apache.commons.lang3.StringUtils;
+import org.diagbot.bigdata.common.ApplicationCacheUtil;
+import org.diagbot.bigdata.dao.model.ResultMappingFilter;
+import org.diagbot.bigdata.util.BigDataConstants;
+import org.diagbot.nlp.feature.FeatureType;
+
+import javax.servlet.http.HttpServletRequest;
+import java.text.DecimalFormat;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @ClassName org.diagbot.bigdata.work.ResultDataProxy
+ * @Description TODO
+ * @Author fyeman
+ * @Date 2019/1/16/016 14:55
+ * @Version 1.0
+ **/
+public class ResultDataProxy {
+    /**
+     * Converts the ranked (name, probability) entries into FeatureRate objects,
+     * applying input-dedup, sex/age filters and name mappings, and truncating the
+     * list to searchData.getLength() items.
+     */
+    public List<FeatureRate> proxy(HttpServletRequest request, SearchData searchData, List<Map.Entry<String, Float>> featuresOrderList, String featureType) {
+        int cursor = 1;
+        DecimalFormat df = new DecimalFormat("0.####");
+        List<FeatureRate> featureList = new ArrayList<>(10);
+
+        // Caches loaded into the servlet context at startup.
+        // NOTE(review): these attributes are used without null checks — confirm the
+        // caches are always initialized before this is called.
+        Map<String, String> featureNameMappingMap = (Map<String, String>) request.getServletContext().getAttribute(BigDataConstants.feature_name_mapping);
+        Map<String, String> resultMappingDiagMap = (Map<String, String>) request.getServletContext().getAttribute(BigDataConstants.result_mapping_diag);
+        Map<String, Map<String, ResultMappingFilter>> resultMappingFilterMap = (Map<String, Map<String, ResultMappingFilter>>) request.getServletContext().getAttribute(BigDataConstants.result_mapping_filter);
+        for (Map.Entry<String, Float> entry : featuresOrderList) {
+            // Drop features already present in the input (except diagnoses).
+            if (!resultFilter(request, searchData, featureType, entry.getKey(), entry.getValue())) {
+                continue;
+            }
+            // Sex/age based exclusion.
+            Map<String, ResultMappingFilter> filterMap = resultMappingFilterMap.get(featureType);
+            if (filterMap != null) {
+                ResultMappingFilter filter = filterMap.get(entry.getKey());
+                if (filter != null) {
+                    // NOTE(review): this skips the result when the filter sex EQUALS the
+                    // request sex; an exclusion filter usually skips on mismatch — confirm
+                    // whether filter.getSex() means "excluded sex" or this is inverted.
+                    if (filter.getSex() != null && !StringUtils.isEmpty(searchData.getSex()) && !"A".equals(searchData.getSex())
+                            && filter.getSex().equals(searchData.getSex())) {      // sex filter
+                        continue;
+                    }
+                    // Age range filter; age 0 is treated as "unknown" and never filtered.
+                    if (filter.getAgeStart() > -1 && searchData.getAge() != 0 && searchData.getAge() < filter.getAgeStart()) {
+                        continue;
+                    }
+                    if (filter.getAgeEnd() > -1 && searchData.getAge() != 0 && searchData.getAge() > filter.getAgeEnd()) {
+                        continue;
+                    }
+                }
+            }
+            FeatureRate featureRate = new FeatureRate();
+            featureRate.setRate(df.format(entry.getValue()));
+            // Vital signs must have a display-name mapping; unmapped ones are dropped.
+            if (FeatureType.parse(featureType) == FeatureType.VITAL) {
+                if (StringUtils.isEmpty(featureNameMappingMap.get(entry.getKey()))) {
+                    continue;
+                } else {
+                    featureRate.setFeatureName(featureNameMappingMap.get(entry.getKey()));
+                }
+            } else {
+                featureRate.setFeatureName(entry.getKey());
+            }
+
+            // Diseases additionally carry their department mapping as an extra property.
+            if (FeatureType.parse(featureType) == FeatureType.DIAG) {
+                featureRate.setFeatureName(entry.getKey());
+                if (StringUtils.isNotEmpty(resultMappingDiagMap.get(entry.getKey()))) {
+                    featureRate.setExtraProperty(resultMappingDiagMap.get(entry.getKey()));
+                }
+            }
+            featureList.add(featureRate);
+            // Stop once the requested result length is reached.
+            if (cursor < searchData.getLength()) {
+                cursor++;
+            } else {
+                break;
+            }
+        }
+
+        return featureList;
+    }
+
+    /**
+     * Returns false when the candidate should be dropped. Currently only drops
+     * features that were already part of the input (diagnoses are kept).
+     * NOTE(review): the threshold parameter is unused here.
+     */
+    public boolean resultFilter(HttpServletRequest request, SearchData searchData, String featureType, String result, float threshold) {
+        // Drop features already present in the input conditions.
+        if (searchData.getInputs().get(result) != null && FeatureType.parse(featureType) != FeatureType.DIAG) {
+            return false;
+        }
+        return true;
+    }
+
+    /**
+     * Replaces each key with its standard synonym (when one exists) and sums the
+     * probabilities of keys that collapse onto the same synonym.
+     */
+    public Map<String, Float> synonymConvert(HttpServletRequest request, Map<String, Float> map) {
+        Map<String, String> standardInfoSynonymMap = (Map<String, String>) request.getServletContext().getAttribute(BigDataConstants.standard_info_synonym_cache);
+        Map<String, Float> result = new HashMap<>();
+        String synonym = "";
+        for (Map.Entry<String, Float> entry : map.entrySet()) {
+            synonym = standardInfoSynonymMap.get(entry.getKey());
+            if (synonym != null) {
+                if (result.get(synonym) == null) {
+                    result.put(synonym, entry.getValue());
+                } else {
+                    // Accumulate scores of terms mapping to the same synonym.
+                    result.put(synonym, result.get(synonym) + entry.getValue());
+                }
+            } else {
+                result.put(entry.getKey(), entry.getValue());
+            }
+        }
+        return result;
+    }
+
+    /**
+     * Merges child-category results into their parent category using the
+     * category tree cached in the servlet context, after discarding entries
+     * below a fixed probability threshold.
+     *
+     * @param request servlet request carrying the category-tree cache
+     * @param map     feature name -> probability
+     * @return merged map keyed by the chosen top-level category names
+     */
+    public Map<String, Float> resultMerge(HttpServletRequest request, Map<String, Float> map) {
+        Map<String, ApplicationCacheUtil.Node> nodesMap = (Map<String, ApplicationCacheUtil.Node>) request.getServletContext().getAttribute(BigDataConstants.standard_info_type_tree_cache);
+        Map<String, Float> resultMap = new HashMap<>();
+        // Fixed probability threshold; entries below it are discarded up front.
+        float threshold = 0.001f;
+        Map<String, Float> thresholdMap = new HashMap<>();
+        for (Map.Entry<String, Float> entry : map.entrySet()) {
+            if (entry.getValue() >= threshold) {
+                thresholdMap.put(entry.getKey(), entry.getValue());
+            }
+        }
+
+        ApplicationCacheUtil.Node node = null;
+        List<String> delList = new ArrayList<>();
+        for (Map.Entry<String, Float> entry : thresholdMap.entrySet()) {
+            // Skip keys already merged into an earlier top-level category.
+            if (delList.contains(entry.getKey())) continue;
+
+            node = nodesMap.get(entry.getKey());
+            if (node != null) {
+                // Walk ancestors to find the highest category that still has
+                // surviving children in the threshold map.
+                String topName = node.getName();
+                ApplicationCacheUtil.Node p = node.getParent();
+                if (p != null && nodesMap.get(p.getName()) != null) {
+                    topName = p.getName();
+                }
+                while (p != null) {
+                    List<String> nodeNamesList = new ArrayList<>();
+                    lookChilds(topName, p, thresholdMap, nodeNamesList);
+                    if (nodeNamesList.size() > 0) {
+                        topName = p.getName();
+                    }
+                    p = p.getParent();
+                }
+
+                if (thresholdMap.get(topName) != null) {
+                    resultMap.put(topName, thresholdMap.get(topName));
+                    delList.add(topName);
+                }
+                // Sum all descendant scores into the chosen top-level category.
+                ApplicationCacheUtil.Node topNode = nodesMap.get(topName);
+                lookChildsAndCal(resultMap, thresholdMap, topNode, delList, topNode.getName());
+                delList.add(topName);
+            } else {
+                // Not part of any category tree: keep as-is.
+                resultMap.put(entry.getKey(), entry.getValue());
+            }
+        }
+        return resultMap;
+    }
+
+    // Collects names of p's descendants (except the subtree named `own`) that
+    // appear in thresholdMap.
+    private void lookChilds(String own, ApplicationCacheUtil.Node p, Map<String, Float> thresholdMap, List<String> nodeNamesList) {
+        for (ApplicationCacheUtil.Node n : p.getChilds()) {
+            if (own.equals(n.getName())) {
+                continue;
+            } else {
+                if (thresholdMap.get(n.getName()) != null) {
+                    nodeNamesList.add(n.getName());
+                }
+                if (n.getChilds().size() > 0) {
+                    lookChilds("", n, thresholdMap, nodeNamesList);
+                }
+            }
+        }
+    }
+
+    // Recursively adds every descendant's score to resultMap[topName] and marks
+    // the descendant as merged in delList.
+    private void lookChildsAndCal(Map<String, Float> resultMap, Map<String, Float> thresholdMap, ApplicationCacheUtil.Node node, List<String> delList, String topName) {
+        for (ApplicationCacheUtil.Node n : node.getChilds()) {
+            if (thresholdMap.get(n.getName()) != null) {
+                if (resultMap.get(topName) == null) {
+                    resultMap.put(topName, thresholdMap.get(n.getName()));
+                } else {
+                    resultMap.put(topName, resultMap.get(topName) + thresholdMap.get(n.getName()));
+                }
+                delList.add(n.getName());
+            }
+            if (n.getChilds().size() > 0) {
+                lookChildsAndCal(resultMap, thresholdMap, n, delList, topName);
+            }
+        }
+    }
+}

+ 196 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/work/SearchData.java

@@ -0,0 +1,196 @@
+package org.diagbot.bigdata.work;
+
+import org.algorithm.util.AlgorithmClassify;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class SearchData {
+    // Maximum number of results to return per category.
+    private int length = 10;
+    private int age_start = 0;
+    private int age_end = 200;
+    private int age = 0;
+    private String sex;
+    // Bayes threshold for the search results (stored as string, parsed on read).
+    private String threshold = "0";
+    // Requested feature categories (comma separated).
+    private String featureType;
+    // featureType split on ",".
+    private String[] featureTypes;
+    // Resource type: outpatient / inpatient classification.
+    private String resourceType;
+    // Model classification per feature type.
+    private AlgorithmClassify algorithmClassify[];
+    // Model classification, raw value.
+    private String algorithmClassifyValue;
+    // External system code used to map returned names; when null/empty the
+    // kl_standard_info standard names are returned.
+    private String sysCode;
+
+    // Free-text clinical inputs.
+    private String symptom = "";
+    private String vital = "";
+    private String lis = "";
+    private String pacs = "";
+    private String diag = "";
+    private String past = "";
+    private String other = "";
+
+    // Extracted model inputs: feature name -> feature attribute map.
+    private Map<String, Map<String, String>> inputs = new HashMap<>(10, 0.8f);
+
+    public int getLength() {
+        return length;
+    }
+
+    public void setLength(int length) {
+        this.length = length;
+    }
+
+    public int getAge_start() {
+        return age_start;
+    }
+
+    public void setAge_start(int age_start) {
+        this.age_start = age_start;
+    }
+
+    public int getAge_end() {
+        return age_end;
+    }
+
+    public void setAge_end(int age_end) {
+        this.age_end = age_end;
+    }
+
+    public int getAge() {
+        return age;
+    }
+
+    public void setAge(int age) {
+        this.age = age;
+    }
+
+    public String getSex() {
+        return sex;
+    }
+
+    public void setSex(String sex) {
+        this.sex = sex;
+    }
+
+    public String getFeatureType() {
+        return featureType;
+    }
+
+    public void setFeatureType(String featureType) {
+        this.featureType = featureType;
+    }
+
+    public String[] getFeatureTypes() {
+        return featureTypes;
+    }
+
+    public void setFeatureTypes(String[] featureTypes) {
+        this.featureTypes = featureTypes;
+    }
+
+    public String getResourceType() {
+        return resourceType;
+    }
+
+    public void setResourceType(String resourceType) {
+        this.resourceType = resourceType;
+    }
+
+    public String getSysCode() {
+        return sysCode;
+    }
+
+    public void setSysCode(String sysCode) {
+        this.sysCode = sysCode;
+    }
+
+    public void setThreshold(String threshold) {
+        this.threshold = threshold;
+    }
+
+    // NOTE: getter type (float) differs from setter type (String) on purpose;
+    // throws NumberFormatException if threshold is not numeric.
+    public float getThreshold() { return Float.parseFloat(threshold); }
+
+    public Map<String, Map<String, String>> getInputs() {
+        return inputs;
+    }
+
+    public void setInputs(Map<String, Map<String, String>> inputs) {
+        this.inputs = inputs;
+    }
+
+    public String getAlgorithmClassifyValue() {
+        return algorithmClassifyValue;
+    }
+
+    public void setAlgorithmClassifyValue(String algorithmClassifyValue) {
+        this.algorithmClassifyValue = algorithmClassifyValue;
+    }
+
+    public AlgorithmClassify[] getAlgorithmClassify() {
+        return algorithmClassify;
+    }
+
+    public void setAlgorithmClassify(AlgorithmClassify[] algorithmClassify) {
+        this.algorithmClassify = algorithmClassify;
+    }
+
+    public String getSymptom() {
+        return symptom;
+    }
+
+    public void setSymptom(String symptom) {
+        this.symptom = symptom;
+    }
+
+    public String getVital() {
+        return vital;
+    }
+
+    public void setVital(String vital) {
+        this.vital = vital;
+    }
+
+    public String getLis() {
+        return lis;
+    }
+
+    public void setLis(String lis) {
+        this.lis = lis;
+    }
+
+    public String getPacs() {
+        return pacs;
+    }
+
+    public void setPacs(String pacs) {
+        this.pacs = pacs;
+    }
+
+    public String getDiag() {
+        return diag;
+    }
+
+    public void setDiag(String diag) {
+        this.diag = diag;
+    }
+
+    public String getPast() {
+        return past;
+    }
+
+    public void setPast(String past) {
+        this.past = past;
+    }
+
+    public String getOther() {
+        return other;
+    }
+
+    public void setOther(String other) {
+        this.other = other;
+    }
+}

+ 38 - 0
bigdata-web/src/main/resources/application.yml

@@ -0,0 +1,38 @@
+server:
+  port: 5001 # server port
+  servlet:
+    context-path: ${spring.application.name}  # context path; without it the app is served at IP:port, with it at IP:port/${context-path}
+
+spring:
+  application:
+    name: /bigdata-web    # application name (NOTE(review): the leading "/" is unusual for an application name — it is reused as the context path above; confirm intended)
+  http:
+    encoding:     # HTTP encoding
+      force: true
+      charset: UTF-8
+      enabled: true
+  datasource:       # MyBatis datasource configuration using the Druid pool
+    # NOTE(review): plaintext credentials committed to source control — consider
+    # externalizing them (environment variables / config server / vault).
+    url: jdbc:mysql://192.168.2.235:3306/bigdata-web?useUnicode=true&characterEncoding=UTF-8
+    username: root
+    password: diagbot@20180822
+    type: com.alibaba.druid.pool.DruidDataSource
+    driver-class-name: com.mysql.jdbc.Driver
+    filters: stat
+    maxActive: 20
+    initialSize: 1
+    maxWait: 60000
+    minIdle: 1
+    timeBetweenEvictionRunsMillis: 60000
+    minEvictableIdleTimeMillis: 300000
+    validationQuery: select 'x'
+    testWhileIdle: true
+    testOnBorrow: false
+    testOnReturn: false
+    poolPreparedStatements: true
+    maxOpenPreparedStatements: 20
+
+logging:          # logging configuration
+  level.root: info
+  level.org.diagbot: debug
+  path: logs/
+  file: bigdata-web.log

+ 17 - 0
bigdata-web/src/test/java/org/diagbot/BigdataWebApplicationTests.java

@@ -0,0 +1,17 @@
+package org.diagbot;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.test.context.junit4.SpringRunner;
+
+@RunWith(SpringRunner.class)
+@SpringBootTest
+public class BigdataWebApplicationTests {
+
+	// Smoke test: passes if the Spring application context starts cleanly.
+	@Test
+	public void contextLoads() {
+	}
+
+}
+

+ 30 - 0
bigdata/pom.xml

@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!-- Maven module descriptor for the bigdata module (child of the "push" parent). -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>push</artifactId>
+        <groupId>org.diagbot</groupId>
+        <version>1.0.0</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>bigdata</artifactId>
+
+    <name>bigdata</name>
+    <!-- FIXME change it to the project's website -->
+    <url>http://www.example.com</url>
+
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <maven.compiler.source>1.8</maven.compiler.source>
+        <maven.compiler.target>1.8</maven.compiler.target>
+    </properties>
+
+    <!-- No module-specific dependencies yet. -->
+    <dependencies>
+    </dependencies>
+
+    <build>
+        <finalName>bigdata</finalName>
+    </build>
+</project>

+ 33 - 0
common-service/pom.xml

@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!-- Maven module descriptor for common-service (child of the "push" parent);
+     depends on the shared "public" module. -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>push</artifactId>
+        <groupId>org.diagbot</groupId>
+        <version>1.0.0</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>common-service</artifactId>
+
+    <name>common-service</name>
+    <!-- FIXME change it to the project's website -->
+    <url>http://www.example.com</url>
+
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.diagbot</groupId>
+            <artifactId>public</artifactId>
+            <version>1.0.0</version>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <finalName>common-service</finalName>
+    </build>
+</project>

+ 27 - 0
common-service/src/main/java/org/diagbot/common/dao/mapper/StandardInfoMapper.java

@@ -0,0 +1,27 @@
+/** 
+* @Company: 杭州朗通信息技术有限公司
+* @Department: 医疗事业部
+* @Description: 互动反馈系统 
+* @Address: 浙江省杭州市余杭区向往街1008号乐富海邦园11幢4楼
+*/
+package org.diagbot.common.dao.mapper;
+
+import org.diagbot.common.dao.model.StandardInfo;
+import org.diagbot.common.dao.model.wrapper.StandardInfoWrapper;
+import org.diagbot.pub.orm.EntityMapper;
+
+import java.util.List;
+import java.util.Map;
+
+/** 
+* MyBatis mapper for standard-terminology records.
+* NOTE(review): the original header named StandardInfo.java / package
+* com.zjlantone.nlp.web.kl.dao.model — both appear stale for this interface.
+* @author: Lou Huirong
+* @date: 2016-08-08
+* @version: V1.0
+*/
+public interface StandardInfoMapper extends EntityMapper<StandardInfo, StandardInfoWrapper, Long> {
+    // Selects wrapper rows whose category_id is null.
+    public List<StandardInfoWrapper> selectNullCategoryIdWrapper();
+    // Selects rows with a non-null relation term, filtered by the given criteria.
+    List<StandardInfo> selectNotNullRelationTerm(Map<String, Object> map);
+}

+ 95 - 0
common-service/src/main/java/org/diagbot/common/dao/model/StandardInfo.java

@@ -0,0 +1,95 @@
+package org.diagbot.common.dao.model;
+
+
+/**
+ * Plain data holder (JavaBean) for one standardized-term row of table
+ * {@code kl_standard_info}. Mutable, getters/setters only, no behavior.
+ */
+public class StandardInfo {
+    private Long id;              // primary key
+    private String code;          // standard code of the term
+    private String name;          // standard term name
+    private String status;        // record status flag
+    private Integer categoryId;   // category primary key (column category_id)
+    private String category;      // category display name
+    private String relationId;    // id of the related term (column relation_id)
+    private String relationType;  // type of the relation (column relation_type)
+    private String relationName;  // name of the related term (column relation_name)
+    private String remark;        // free-form remark
+
+    public Long getId() {
+        return id;
+    }
+
+    public void setId(Long id) {
+        this.id = id;
+    }
+
+    public String getCode() {
+        return code;
+    }
+
+    public void setCode(String code) {
+        this.code = code;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public Integer getCategoryId() {
+        return categoryId;
+    }
+
+    public void setCategoryId(Integer categoryId) {
+        this.categoryId = categoryId;
+    }
+
+    public String getCategory() {
+        return category;
+    }
+
+    public void setCategory(String category) {
+        this.category = category;
+    }
+
+    public String getRelationId() {
+        return relationId;
+    }
+
+    public void setRelationId(String relationId) {
+        this.relationId = relationId;
+    }
+
+    public String getRelationType() {
+        return relationType;
+    }
+
+    public void setRelationType(String relationType) {
+        this.relationType = relationType;
+    }
+
+    public String getRelationName() {
+        return relationName;
+    }
+
+    public void setRelationName(String relationName) {
+        this.relationName = relationName;
+    }
+
+    public String getStatus() {
+        return status;
+    }
+
+    public void setStatus(String status) {
+        this.status = status;
+    }
+
+    public String getRemark() {
+        return remark;
+    }
+
+    public void setRemark(String remark) {
+        this.remark = remark;
+    }
+}

+ 6 - 0
common-service/src/main/java/org/diagbot/common/dao/model/wrapper/StandardInfoWrapper.java

@@ -0,0 +1,6 @@
+package org.diagbot.common.dao.model.wrapper;
+
+import org.diagbot.common.dao.model.StandardInfo;
+
+/**
+ * Extended view of {@link StandardInfo} used by the "*Wrapper" result maps;
+ * currently adds no fields of its own.
+ */
+public class StandardInfoWrapper extends StandardInfo {
+}

+ 150 - 0
common-service/src/main/java/org/diagbot/common/dao/xml/StandardInfoMapper.xml

@@ -0,0 +1,150 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" 
+    "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
+<mapper namespace="org.diagbot.common.dao.mapper.StandardInfoMapper">
+	<!-- Result column mapping for StandardInfo -->
+	<resultMap type="org.diagbot.common.dao.model.StandardInfo" id="standardInfoMap">
+		<id property="id" column="id"/>
+		<result property="code" column="code"/>
+		<result property="name" column="name"/>
+		<result property="status" column="status"/>
+		<result property="categoryId" column="category_id"/>
+		<result property="category" column="category"/>
+		<result property="relationType" column="relation_type"/>
+		<result property="relationId" column="relation_id"/>
+		<result property="relationName" column="relation_name"/>
+		<result property="remark" column="remark"/>
+	</resultMap>
+
+	<!-- Result column mapping for StandardInfoWrapper -->
+	<resultMap type="org.diagbot.common.dao.model.wrapper.StandardInfoWrapper" id="standardInfoWrapperMap">
+		<id property="id" column="id"/>
+		<result property="code" column="code"/>
+		<result property="name" column="name"/>
+		<result property="status" column="status"/>
+		<result property="categoryId" column="category_id"/>
+		<result property="category" column="category"/>
+		<result property="relationType" column="relation_type"/>
+		<result property="relationId" column="relation_id"/>
+		<result property="relationName" column="relation_name"/>
+		<result property="remark" column="remark"/>
+	</resultMap>
+
+	<!-- Common query result column list -->
+	<sql id="Base_Column_List">
+		 t.id, t.code,	 t.name,	 t.status,	  t.category_id,	 t.category,	 t.relation_type,	 t.relation_id,	 t.relation_name,	 t.remark
+	</sql>
+
+	<!-- 查询(根据主键ID查询) -->
+	<select id="selectByPrimaryKey" resultMap="standardInfoMap" parameterType="java.lang.Long">
+		 SELECT
+		 <include refid="Base_Column_List" />
+		 FROM kl_standard_info t
+		 WHERE t.id = #{id}
+	</select>
+
+	<!-- 查询(根据主键ID查询) -->
+	<select id="selectWrapperByPrimaryKey" resultMap="standardInfoWrapperMap" parameterType="java.lang.Long">
+		 SELECT
+		 <include refid="Base_Column_List" />
+		 FROM kl_standard_info t
+		 WHERE t.id = #{id}
+	</select>
+
+	<!-- 依据Map查询条件返回结果集-->
+	<select id="selectList" resultMap="standardInfoMap" parameterType="java.util.Map">
+		SELECT
+		distinct t.name, t.category_id
+		FROM kl_standard_info t WHERE category_id != '100'
+		<if test="id != null and id != ''">
+				 and t.id = #{id}
+		</if>
+		<if test="code != null and code != ''">
+				 and t.code = #{code}
+		</if>
+		<if test="name != null and name != ''">
+				 and t.name = #{name}
+		</if>
+		<if test="status != null and status != ''">
+				 and t.status = #{status}
+		</if>
+		<if test="categoryId != null and categoryId != ''">
+				 and t.category_id = #{categoryId}
+		</if>
+		<if test="category != null and category != ''">
+				 and t.category = #{category}
+		</if>
+		<if test="relationType != null and relationType != ''">
+				 and t.relation_type = #{relationType}
+		</if>
+		<if test="relationId != null and relationId != ''">
+				 and t.relation_id = #{relationId}
+		</if>
+		<if test="relationName != null and relationName != ''">
+				 and t.relation_name = #{relationName}
+		</if>
+		<if test="remark != null and remark != ''">
+				 and t.remark = #{remark}
+		</if>
+	</select>
+
+	<!-- 依据Map查询条件返回扩展属性结果集-->
+	<select id="selectNullCategoryIdWrapper" resultMap="standardInfoWrapperMap" parameterType="java.util.Map">
+		SELECT
+		<include refid="Base_Column_List"/>
+		FROM kl_standard_info t WHERE category_id is null or category_id = ''
+	</select>
+
+	<!-- 依据Map查询条件返回扩展属性结果集-->
+	<select id="selectNotNullRelationTerm" resultMap="standardInfoMap" parameterType="java.util.Map">
+		SELECT
+		<include refid="Base_Column_List"/>
+		FROM kl_standard_info t WHERE relation_name is not null and relation_name != ''
+		<if test="relationType != null and relationType != ''">
+			and relation_type = #{relationType}
+		</if>
+	</select>
+
+	<!-- 依据Map查询条件返回扩展属性结果集-->
+	<select id="selectListWrapper" resultMap="standardInfoWrapperMap" parameterType="java.util.Map">
+		SELECT 
+		<include refid="Base_Column_List"/>
+		FROM kl_standard_info t WHERE 1=1 
+		<if test="id != null and id != ''">
+				 and t.id = #{id}
+		</if>
+		<if test="code != null and code != ''">
+				 and t.code = #{code}
+		</if>
+		<if test="name != null and name != ''">
+			and t.name like concat('%',#{name},'%')
+		</if>
+		<if test="status != null and status != ''">
+				 and t.status = #{status}
+		</if>
+		<if test="categoryId != null and categoryId != ''">
+				 and t.category_id = #{categoryId}
+		</if>
+		<if test="category != null and category != ''">
+			and t.category like concat('%',#{category},'%')
+		</if>
+		<if test="relationType != null and relationType != ''">
+				 and t.relation_type = #{relationType}
+		</if>
+		<if test="relationId != null and relationId != ''">
+				 and t.relation_id = #{relationId}
+		</if>
+		<if test="relationName != null and relationName != ''">
+				 and t.relation_name = #{relationName}
+		</if>
+		<if test="remark != null and remark != ''">
+				 and t.remark = #{remark}
+		</if>
+	</select> 
+
+	<!--删除:根据主键ID删除-->
+	<delete id="deleteByPrimaryKey" parameterType="java.lang.Long">
+		 DELETE FROM kl_standard_info
+		 WHERE id = #{id}
+	</delete>
+</mapper>

+ 13 - 0
common-service/src/main/java/org/diagbot/common/service/StandardInfoService.java

@@ -0,0 +1,13 @@
+package org.diagbot.common.service;
+
+import org.diagbot.common.dao.model.StandardInfo;
+import org.diagbot.common.dao.model.wrapper.StandardInfoWrapper;
+import org.diagbot.pub.service.BaseService;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Service facade over {@code StandardInfoMapper}; mirrors the mapper's two
+ * custom queries on top of the generic CRUD operations from BaseService.
+ */
+public interface StandardInfoService extends BaseService<StandardInfo, StandardInfoWrapper, Long> {
+    // Wrapper rows whose category_id is NULL or empty.
+    public List<StandardInfoWrapper> selectNullCategoryIdWrapper();
+    // Rows with a non-empty relation_name; map may carry a "relationType" filter.
+    List<StandardInfo> selectNotNullRelationTerm(Map<String, Object> map);
+}

+ 31 - 0
common-service/src/main/java/org/diagbot/common/service/impl/StandardInfoServiceImpl.java

@@ -0,0 +1,31 @@
+package org.diagbot.common.service.impl;
+
+import org.diagbot.common.dao.mapper.StandardInfoMapper;
+import org.diagbot.common.dao.model.StandardInfo;
+import org.diagbot.common.dao.model.wrapper.StandardInfoWrapper;
+import org.diagbot.common.service.StandardInfoService;
+import org.diagbot.pub.service.BaseServiceImpl;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import java.util.List;
+import java.util.Map;
+
+@Service
+public class StandardInfoServiceImpl extends BaseServiceImpl<StandardInfo, StandardInfoWrapper, Long> implements StandardInfoService {
+    @Autowired
+    StandardInfoMapper standardInfoMapper;
+
+    // Hands the injected mapper to the generic base class so inherited CRUD works.
+    // NOTE(review): @Autowired on a zero-argument private method is non-idiomatic —
+    // Spring treats it as an injection method and warns about parameterless
+    // @Autowired methods; it also relies on the field above being injected first.
+    // @PostConstruct (or constructor injection) would express the intent more
+    // clearly — confirm before changing.
+    @Autowired
+    private void setEntityMapper() {
+        super.setEntityMapper(standardInfoMapper);
+    }
+
+    // Delegates to the mapper: wrapper rows whose category_id is NULL or empty.
+    public List<StandardInfoWrapper> selectNullCategoryIdWrapper() {
+        return standardInfoMapper.selectNullCategoryIdWrapper();
+    }
+
+    // Delegates to the mapper: rows with a non-empty relation_name,
+    // optionally filtered by map key "relationType".
+    public List<StandardInfo> selectNotNullRelationTerm(Map<String, Object> map) {
+        return standardInfoMapper.selectNotNullRelationTerm(map);
+    }
+}

+ 25 - 0
graph-web/.gitignore

@@ -0,0 +1,25 @@
+/target/
+!.mvn/wrapper/maven-wrapper.jar
+
+### STS ###
+.apt_generated
+.classpath
+.factorypath
+.project
+.settings
+.springBeans
+.sts4-cache
+
+### IntelliJ IDEA ###
+.idea
+*.iws
+*.iml
+*.ipr
+
+### NetBeans ###
+/nbproject/private/
+/build/
+/nbbuild/
+/dist/
+/nbdist/
+/.nb-gradle/

BIN
graph-web/.mvn/wrapper/maven-wrapper.jar


+ 1 - 0
graph-web/.mvn/wrapper/maven-wrapper.properties

@@ -0,0 +1 @@
+distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.5.4/apache-maven-3.5.4-bin.zip

+ 286 - 0
graph-web/mvnw

@@ -0,0 +1,286 @@
+#!/bin/sh
+# ----------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ----------------------------------------------------------------------------
+
+# ----------------------------------------------------------------------------
+# Maven2 Start Up Batch script
+#
+# Required ENV vars:
+# ------------------
+#   JAVA_HOME - location of a JDK home dir
+#
+# Optional ENV vars
+# -----------------
+#   M2_HOME - location of maven2's installed home dir
+#   MAVEN_OPTS - parameters passed to the Java VM when running Maven
+#     e.g. to debug Maven itself, use
+#       set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
+#   MAVEN_SKIP_RC - flag to disable loading of mavenrc files
+# ----------------------------------------------------------------------------
+
+if [ -z "$MAVEN_SKIP_RC" ] ; then
+
+  if [ -f /etc/mavenrc ] ; then
+    . /etc/mavenrc
+  fi
+
+  if [ -f "$HOME/.mavenrc" ] ; then
+    . "$HOME/.mavenrc"
+  fi
+
+fi
+
+# OS specific support.  $var _must_ be set to either true or false.
+cygwin=false;
+darwin=false;
+mingw=false
+case "`uname`" in
+  CYGWIN*) cygwin=true ;;
+  MINGW*) mingw=true;;
+  Darwin*) darwin=true
+    # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
+    # See https://developer.apple.com/library/mac/qa/qa1170/_index.html
+    if [ -z "$JAVA_HOME" ]; then
+      if [ -x "/usr/libexec/java_home" ]; then
+        export JAVA_HOME="`/usr/libexec/java_home`"
+      else
+        export JAVA_HOME="/Library/Java/Home"
+      fi
+    fi
+    ;;
+esac
+
+if [ -z "$JAVA_HOME" ] ; then
+  if [ -r /etc/gentoo-release ] ; then
+    JAVA_HOME=`java-config --jre-home`
+  fi
+fi
+
+if [ -z "$M2_HOME" ] ; then
+  ## resolve links - $0 may be a link to maven's home
+  PRG="$0"
+
+  # need this for relative symlinks
+  while [ -h "$PRG" ] ; do
+    ls=`ls -ld "$PRG"`
+    link=`expr "$ls" : '.*-> \(.*\)$'`
+    if expr "$link" : '/.*' > /dev/null; then
+      PRG="$link"
+    else
+      PRG="`dirname "$PRG"`/$link"
+    fi
+  done
+
+  saveddir=`pwd`
+
+  M2_HOME=`dirname "$PRG"`/..
+
+  # make it fully qualified
+  M2_HOME=`cd "$M2_HOME" && pwd`
+
+  cd "$saveddir"
+  # echo Using m2 at $M2_HOME
+fi
+
+# For Cygwin, ensure paths are in UNIX format before anything is touched
+if $cygwin ; then
+  [ -n "$M2_HOME" ] &&
+    M2_HOME=`cygpath --unix "$M2_HOME"`
+  [ -n "$JAVA_HOME" ] &&
+    JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
+  [ -n "$CLASSPATH" ] &&
+    CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
+fi
+
+# For Mingw, ensure paths are in UNIX format before anything is touched
+if $mingw ; then
+  [ -n "$M2_HOME" ] &&
+    M2_HOME="`(cd "$M2_HOME"; pwd)`"
+  [ -n "$JAVA_HOME" ] &&
+    JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
+  # TODO classpath?
+fi
+
+if [ -z "$JAVA_HOME" ]; then
+  javaExecutable="`which javac`"
+  if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
+    # readlink(1) is not available as standard on Solaris 10.
+    readLink=`which readlink`
+    if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
+      if $darwin ; then
+        javaHome="`dirname \"$javaExecutable\"`"
+        javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
+      else
+        javaExecutable="`readlink -f \"$javaExecutable\"`"
+      fi
+      javaHome="`dirname \"$javaExecutable\"`"
+      javaHome=`expr "$javaHome" : '\(.*\)/bin'`
+      JAVA_HOME="$javaHome"
+      export JAVA_HOME
+    fi
+  fi
+fi
+
+if [ -z "$JAVACMD" ] ; then
+  if [ -n "$JAVA_HOME"  ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+      # IBM's JDK on AIX uses strange locations for the executables
+      JAVACMD="$JAVA_HOME/jre/sh/java"
+    else
+      JAVACMD="$JAVA_HOME/bin/java"
+    fi
+  else
+    JAVACMD="`which java`"
+  fi
+fi
+
+if [ ! -x "$JAVACMD" ] ; then
+  echo "Error: JAVA_HOME is not defined correctly." >&2
+  echo "  We cannot execute $JAVACMD" >&2
+  exit 1
+fi
+
+if [ -z "$JAVA_HOME" ] ; then
+  echo "Warning: JAVA_HOME environment variable is not set."
+fi
+
+CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
+
+# traverses directory structure from process work directory to filesystem root
+# first directory with .mvn subdirectory is considered project base directory
+find_maven_basedir() {
+
+  if [ -z "$1" ]
+  then
+    echo "Path not specified to find_maven_basedir"
+    return 1
+  fi
+
+  basedir="$1"
+  wdir="$1"
+  while [ "$wdir" != '/' ] ; do
+    if [ -d "$wdir"/.mvn ] ; then
+      basedir=$wdir
+      break
+    fi
+    # workaround for JBEAP-8937 (on Solaris 10/Sparc)
+    if [ -d "${wdir}" ]; then
+      wdir=`cd "$wdir/.."; pwd`
+    fi
+    # end of workaround
+  done
+  echo "${basedir}"
+}
+
+# concatenates all lines of a file
+concat_lines() {
+  if [ -f "$1" ]; then
+    echo "$(tr -s '\n' ' ' < "$1")"
+  fi
+}
+
+BASE_DIR=`find_maven_basedir "$(pwd)"`
+if [ -z "$BASE_DIR" ]; then
+  exit 1;
+fi
+
+##########################################################################################
+# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
+# This allows using the maven wrapper in projects that prohibit checking in binary data.
+##########################################################################################
+if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then
+    if [ "$MVNW_VERBOSE" = true ]; then
+      echo "Found .mvn/wrapper/maven-wrapper.jar"
+    fi
+else
+    if [ "$MVNW_VERBOSE" = true ]; then
+      echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..."
+    fi
+    jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar"
+    while IFS="=" read key value; do
+      case "$key" in (wrapperUrl) jarUrl="$value"; break ;;
+      esac
+    done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties"
+    if [ "$MVNW_VERBOSE" = true ]; then
+      echo "Downloading from: $jarUrl"
+    fi
+    wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar"
+
+    if command -v wget > /dev/null; then
+        if [ "$MVNW_VERBOSE" = true ]; then
+          echo "Found wget ... using wget"
+        fi
+        wget "$jarUrl" -O "$wrapperJarPath"
+    elif command -v curl > /dev/null; then
+        if [ "$MVNW_VERBOSE" = true ]; then
+          echo "Found curl ... using curl"
+        fi
+        curl -o "$wrapperJarPath" "$jarUrl"
+    else
+        if [ "$MVNW_VERBOSE" = true ]; then
+          echo "Falling back to using Java to download"
+        fi
+        javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java"
+        if [ -e "$javaClass" ]; then
+            if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
+                if [ "$MVNW_VERBOSE" = true ]; then
+                  echo " - Compiling MavenWrapperDownloader.java ..."
+                fi
+                # Compiling the Java class
+                ("$JAVA_HOME/bin/javac" "$javaClass")
+            fi
+            if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
+                # Running the downloader
+                if [ "$MVNW_VERBOSE" = true ]; then
+                  echo " - Running MavenWrapperDownloader.java ..."
+                fi
+                ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR")
+            fi
+        fi
+    fi
+fi
+##########################################################################################
+# End of extension
+##########################################################################################
+
+export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
+if [ "$MVNW_VERBOSE" = true ]; then
+  echo $MAVEN_PROJECTBASEDIR
+fi
+MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin; then
+  [ -n "$M2_HOME" ] &&
+    M2_HOME=`cygpath --path --windows "$M2_HOME"`
+  [ -n "$JAVA_HOME" ] &&
+    JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
+  [ -n "$CLASSPATH" ] &&
+    CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
+  [ -n "$MAVEN_PROJECTBASEDIR" ] &&
+    MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
+fi
+
+WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
+
+exec "$JAVACMD" \
+  $MAVEN_OPTS \
+  -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
+  "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
+  ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"

+ 161 - 0
graph-web/mvnw.cmd

@@ -0,0 +1,161 @@
+@REM ----------------------------------------------------------------------------
+@REM Licensed to the Apache Software Foundation (ASF) under one
+@REM or more contributor license agreements.  See the NOTICE file
+@REM distributed with this work for additional information
+@REM regarding copyright ownership.  The ASF licenses this file
+@REM to you under the Apache License, Version 2.0 (the
+@REM "License"); you may not use this file except in compliance
+@REM with the License.  You may obtain a copy of the License at
+@REM
+@REM    http://www.apache.org/licenses/LICENSE-2.0
+@REM
+@REM Unless required by applicable law or agreed to in writing,
+@REM software distributed under the License is distributed on an
+@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+@REM KIND, either express or implied.  See the License for the
+@REM specific language governing permissions and limitations
+@REM under the License.
+@REM ----------------------------------------------------------------------------
+
+@REM ----------------------------------------------------------------------------
+@REM Maven2 Start Up Batch script
+@REM
+@REM Required ENV vars:
+@REM JAVA_HOME - location of a JDK home dir
+@REM
+@REM Optional ENV vars
+@REM M2_HOME - location of maven2's installed home dir
+@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
+@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending
+@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
+@REM     e.g. to debug Maven itself, use
+@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
+@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
+@REM ----------------------------------------------------------------------------
+
+@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
+@echo off
+@REM set title of command window
+title %0
+@REM enable echoing my setting MAVEN_BATCH_ECHO to 'on'
+@if "%MAVEN_BATCH_ECHO%" == "on"  echo %MAVEN_BATCH_ECHO%
+
+@REM set %HOME% to equivalent of $HOME
+if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
+
+@REM Execute a user defined script before this one
+if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
+@REM check for pre script, once with legacy .bat ending and once with .cmd ending
+if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
+if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
+:skipRcPre
+
+@setlocal
+
+set ERROR_CODE=0
+
+@REM To isolate internal variables from possible post scripts, we use another setlocal
+@setlocal
+
+@REM ==== START VALIDATION ====
+if not "%JAVA_HOME%" == "" goto OkJHome
+
+echo.
+echo Error: JAVA_HOME not found in your environment. >&2
+echo Please set the JAVA_HOME variable in your environment to match the >&2
+echo location of your Java installation. >&2
+echo.
+goto error
+
+:OkJHome
+if exist "%JAVA_HOME%\bin\java.exe" goto init
+
+echo.
+echo Error: JAVA_HOME is set to an invalid directory. >&2
+echo JAVA_HOME = "%JAVA_HOME%" >&2
+echo Please set the JAVA_HOME variable in your environment to match the >&2
+echo location of your Java installation. >&2
+echo.
+goto error
+
+@REM ==== END VALIDATION ====
+
+:init
+
+@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
+@REM Fallback to current working directory if not found.
+
+set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
+IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
+
+set EXEC_DIR=%CD%
+set WDIR=%EXEC_DIR%
+:findBaseDir
+IF EXIST "%WDIR%"\.mvn goto baseDirFound
+cd ..
+IF "%WDIR%"=="%CD%" goto baseDirNotFound
+set WDIR=%CD%
+goto findBaseDir
+
+:baseDirFound
+set MAVEN_PROJECTBASEDIR=%WDIR%
+cd "%EXEC_DIR%"
+goto endDetectBaseDir
+
+:baseDirNotFound
+set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
+cd "%EXEC_DIR%"
+
+:endDetectBaseDir
+
+IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
+
+@setlocal EnableExtensions EnableDelayedExpansion
+for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
+@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
+
+:endReadAdditionalConfig
+
+SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
+set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
+set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
+
+set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar"
+FOR /F "tokens=1,2 delims==" %%A IN (%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties) DO (
+	IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B 
+)
+
+@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
+@REM This allows using the maven wrapper in projects that prohibit checking in binary data.
+if exist %WRAPPER_JAR% (
+    echo Found %WRAPPER_JAR%
+) else (
+    echo Couldn't find %WRAPPER_JAR%, downloading it ...
+	echo Downloading from: %DOWNLOAD_URL%
+    powershell -Command "(New-Object Net.WebClient).DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"
+    echo Finished downloading %WRAPPER_JAR%
+)
+@REM End of extension
+
+%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
+if ERRORLEVEL 1 goto error
+goto end
+
+:error
+set ERROR_CODE=1
+
+:end
+@endlocal & set ERROR_CODE=%ERROR_CODE%
+
+if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
+@REM check for post script, once with legacy .bat ending and once with .cmd ending
+if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
+if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
+:skipRcPost
+
+@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
+if "%MAVEN_BATCH_PAUSE%" == "on" pause
+
+if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
+
+exit /B %ERROR_CODE%

+ 83 - 0
graph-web/pom.xml

@@ -0,0 +1,83 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<groupId>org.springframework.boot</groupId>
+		<artifactId>spring-boot-starter-parent</artifactId>
+		<version>2.1.2.RELEASE</version>
+		<relativePath/> <!-- lookup parent from repository -->
+	</parent>
+	<groupId>org.diagbot</groupId>
+	<artifactId>graph-web</artifactId>
+	<version>0.0.1-SNAPSHOT</version>
+	<name>graph-web</name>
+	<description>Demo project for Spring Boot</description>
+
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+		<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+		<java.version>1.8</java.version>
+		<druid.version>1.0.23</druid.version>
+		<mybatis.version>1.1.1</mybatis.version>
+		<mysql.version>5.1.38</mysql.version>
+		<swagger2.version>2.7.0</swagger2.version>
+	</properties>
+
+	<dependencies>
+		<dependency>
+			<groupId>org.diagbot</groupId>
+			<artifactId>public</artifactId>
+			<version>1.0.0</version>
+		</dependency>
+		<dependency>
+			<groupId>org.springframework.boot</groupId>
+			<artifactId>spring-boot-starter-web</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>org.springframework.boot</groupId>
+			<artifactId>spring-boot-starter-test</artifactId>
+			<scope>test</scope>
+		</dependency>
+
+		<dependency>
+			<groupId>mysql</groupId>
+			<artifactId>mysql-connector-java</artifactId>
+			<version>${mysql.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>com.alibaba</groupId>
+			<artifactId>druid</artifactId>
+			<version>${druid.version}</version>
+		</dependency>
+	</dependencies>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.springframework.boot</groupId>
+				<artifactId>spring-boot-maven-plugin</artifactId>
+			</plugin>
+		</plugins>
+
+		<resources>
+			<resource>
+				<directory>src/main/java</directory>
+				<includes>
+					<include>**/*.properties</include>
+					<include>**/*.xml</include>
+					<include>**/*.json</include>
+				</includes>
+				<filtering>false</filtering>
+			</resource>
+			<resource>
+				<directory>src/main/resources</directory>
+				<includes>
+					<include>**/*.yml</include>
+				</includes>
+			</resource>
+		</resources>
+		<finalName>graph-web</finalName>
+	</build>
+
+</project>

+ 14 - 0
graph-web/src/main/java/org/diagbot/graph/GraphWebApplication.java

@@ -0,0 +1,14 @@
+package org.diagbot.graph;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+
+/** Spring Boot entry point for the graph-web module. */
+@SpringBootApplication
+public class GraphWebApplication {
+
+	public static void main(String[] args) {
+		SpringApplication.run(GraphWebApplication.class, args);
+	}
+
+}
+

+ 38 - 0
graph-web/src/main/resources/application.yml

@@ -0,0 +1,38 @@
+server:
+  port: 5003 # 端口号
+  servlet:
+    context-path: ${spring.application.name}  # 访问路径,如果不配置,访问IP:端口号,配置后访问IP:端口号/${context-path}
+
+spring:
+  application:
+    name: /graph-web    # 项目名称
+  http:
+    encoding:     # http编码
+      force: true
+      charset: UTF-8
+      enabled: true
+  datasource:       # mybatis 配置,使用druid数据源
+    url: jdbc:mysql://192.168.2.235:3306/graph-web?useUnicode=true&characterEncoding=UTF-8
+    username: root
+    password: diagbot@20180822
+    type: com.alibaba.druid.pool.DruidDataSource
+    driver-class-name: com.mysql.jdbc.Driver
+    filters: stat
+    maxActive: 20
+    initialSize: 1
+    maxWait: 60000
+    minIdle: 1
+    timeBetweenEvictionRunsMillis: 60000
+    minEvictableIdleTimeMillis: 300000
+    validationQuery: select 'x'
+    testWhileIdle: true
+    testOnBorrow: false
+    testOnReturn: false
+    poolPreparedStatements: true
+    maxOpenPreparedStatements: 20
+
+logging:          # 日志
+  level.root: info
+  level.org.diagbot: debug
+  path: logs/
+  file: graph-web.log

+ 50 - 0
graph/pom.xml

@@ -0,0 +1,50 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>push</artifactId>
+        <groupId>org.diagbot</groupId>
+        <version>1.0.0</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>graph</artifactId>
+
+    <name>graph</name>
+    <!-- FIXME change it to the project's website -->
+    <url>http://www.example.com</url>
+
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <maven.compiler.source>1.8</maven.compiler.source>
+        <maven.compiler.target>1.8</maven.compiler.target>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.neo4j</groupId>
+            <artifactId>neo4j</artifactId>
+            <version>3.2.9</version>
+        </dependency>
+        <dependency>
+            <groupId>org.neo4j</groupId>
+            <artifactId>neo4j-ogm-embedded-driver</artifactId>
+            <version>3.1.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.neo4j</groupId>
+            <artifactId>neo4j-ogm-http-driver</artifactId>
+            <version>3.1.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.neo4j.driver</groupId>
+            <artifactId>neo4j-java-driver</artifactId>
+            <version>1.6.1</version>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <finalName>graph</finalName>
+    </build>
+</project>

+ 16 - 0
graph/src/main/java/org/diagbot/graph/annotation/FromProperty.java

@@ -0,0 +1,16 @@
+package org.diagbot.graph.annotation;
+
+import java.lang.annotation.*;
+
+/**
+ * @author: fyeman
+ * @Date: 2018/9/26/026 15:00
+ * @Description:
+ */
+
+/**
+ * Field-level marker carrying a property name for the source ("from") side of a
+ * graph relation mapping. Retained at runtime so it can be read via reflection.
+ * Empty string (the default) means "unset".
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.FIELD})
+@Inherited
+public @interface FromProperty {
+    String value() default "";
+}

+ 14 - 0
graph/src/main/java/org/diagbot/graph/annotation/RelationName.java

@@ -0,0 +1,14 @@
+package org.diagbot.graph.annotation;
+
+import java.lang.annotation.*;
+
/**
 * Field-level marker annotation with no elements, retained at runtime for
 * reflective lookup.
 *
 * <p>NOTE(review): by name this presumably marks the field that carries a
 * graph relation's name — confirm against the reflection code that reads it.
 *
 * @author fyeman
 * @since 2018-09-26
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD})
@Inherited
public @interface RelationName {
}

+ 16 - 0
graph/src/main/java/org/diagbot/graph/annotation/ToProperty.java

@@ -0,0 +1,16 @@
+package org.diagbot.graph.annotation;
+
+import java.lang.annotation.*;
+
/**
 * Field-level marker annotation, retained at runtime for reflective lookup.
 * Counterpart of {@code FromProperty}.
 *
 * <p>NOTE(review): by name this presumably tags the property taken from the
 * "to" (target) node of a graph relation — confirm against the reflection
 * code that reads it.
 *
 * @author fyeman
 * @since 2018-09-26
 */

@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD})
@Inherited
public @interface ToProperty {
    /** Property name; empty string (the default) means unspecified. */
    String value() default "";
}

+ 35 - 0
graph/src/main/java/org/diagbot/graph/javabean/Drugs.java

@@ -0,0 +1,35 @@
+package org.diagbot.graph.javabean;
+
+import java.util.LinkedList;
+
+/**
+ * 药类
+ */
+public class Drugs {
+    private String drugsName;//药类名
+    private LinkedList<Medicition> medicitionsList;
+
+    public String getDrugsName() {
+        return drugsName;
+    }
+
+    public void setDrugsName(String drugsName) {
+        this.drugsName = drugsName;
+    }
+
+    public LinkedList<Medicition> getMedicitionsList() {
+        return medicitionsList;
+    }
+
+    public void setMedicitionsList(LinkedList<Medicition> medicitionsList) {
+        this.medicitionsList = medicitionsList;
+    }
+
+    public Drugs() {
+    }
+
+    public Drugs(String drugsName, LinkedList<Medicition> medicitionsList) {
+        this.drugsName = drugsName;
+        this.medicitionsList = medicitionsList;
+    }
+}

+ 15 - 0
graph/src/main/java/org/diagbot/graph/javabean/Filnlly.java

@@ -0,0 +1,15 @@
+package org.diagbot.graph.javabean;
+
+import java.util.ArrayList;
+
+public class Filnlly {
+    private ArrayList<Drugs> treatment;
+
+    public ArrayList<Drugs> getTreatment() {
+        return treatment;
+    }
+
+    public void setTreatment(ArrayList<Drugs> treatment) {
+        this.treatment = treatment;
+    }
+}

+ 53 - 0
graph/src/main/java/org/diagbot/graph/javabean/Medicition.java

@@ -0,0 +1,53 @@
+package org.diagbot.graph.javabean;
+
+/**
+ * 药
+ */
+public class Medicition {
+    private String medicitionName;//药名
+    private String rate;//使用率
+    private Integer isShow;//0不展示,1展示
+    private Integer forbidden;// 0:正常,1:慎用,2:禁忌"
+
+    public String getMedicitionName() {
+        return medicitionName;
+    }
+
+    public void setMedicitionName(String medicitionName) {
+        this.medicitionName = medicitionName;
+    }
+
+    public String getRate() {
+        return rate;
+    }
+
+    public void setRate(String rate) {
+        this.rate = rate;
+    }
+
+    public Integer getIsShow() {
+        return isShow;
+    }
+
+    public void setIsShow(Integer isShow) {
+        this.isShow = isShow;
+    }
+
+    public Integer getForbidden() {
+        return forbidden;
+    }
+
+    public void setForbidden(Integer forbidden) {
+        this.forbidden = forbidden;
+    }
+
+    public Medicition() {
+    }
+
+    public Medicition(String medicitionName, String rate, Integer isShow, Integer forbidden) {
+        this.medicitionName = medicitionName;
+        this.rate = rate;
+        this.isShow = isShow;
+        this.forbidden = forbidden;
+    }
+}

+ 0 - 0
graph/src/main/java/org/diagbot/graph/jdbc/DriverManager.java


Some files were not shown because too many files changed in this diff