فهرست منبع

Merge remote-tracking branch 'origin/push-dev' into push-dev-similar

louhr 5 سال پیش
والد
کامیت
a2ed86b8fb
32 فایل تغییر یافته به همراه 1856 افزوده شده و 1106 حذف شده
  1. 16 18
      algorithm/src/main/java/org/algorithm/core/neural/TensorFlowModelLoadFactory.java
  2. 94 36
      algorithm/src/main/java/org/algorithm/core/neural/TensorflowModel.java
  3. 202 17
      algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSet.java
  4. 78 7
      algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSetImpl.java
  5. 15 0
      algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSetImplNonParallel.java
  6. 24 44
      algorithm/src/main/java/org/algorithm/test/Test.java
  7. 1 1
      algorithm/src/main/resources/algorithm.properties
  8. 21 4
      bigdata-web/src/main/java/org/diagbot/bigdata/work/AlgorithmCore.java
  9. 1 1
      bigdata-web/src/main/java/org/diagbot/bigdata/work/ResultDataProxy.java
  10. 63 0
      common-push/src/main/java/org/diagbot/common/push/bean/RelevantFeature.java
  11. 10 0
      common-push/src/main/java/org/diagbot/common/push/bean/SearchData.java
  12. 155 0
      common-push/src/main/java/org/diagbot/common/push/cache/ApplicationCacheUtil.java
  13. 64 4
      common-push/src/main/java/org/diagbot/common/push/cache/CacheFileManager.java
  14. 30 0
      common-push/src/main/java/org/diagbot/common/push/naivebayes/NaiveBayesTest.java
  15. 92 0
      common-push/src/main/java/org/diagbot/common/push/naivebayes/core/AlgorithmNaiveBayesExecutor.java
  16. 34 0
      common-push/src/main/java/org/diagbot/common/push/naivebayes/factory/AlgorithmNaiveBayesFactory.java
  17. 26 19
      common-push/src/main/java/org/diagbot/common/push/work/ParamsDataProxy.java
  18. 23 7
      graph-web/src/main/java/org/diagbot/graphWeb/work/GraphCalculate.java
  19. 352 875
      graph/src/main/java/org/diagbot/graph/jdbc/Neo4jAPI.java
  20. 0 1
      graph/src/main/java/org/diagbot/graph/jdbc/gdbtest.java
  21. 23 18
      graph/src/main/resources/bolt.properties
  22. 5 0
      nlp-web/pom.xml
  23. 26 5
      nlp-web/src/main/java/org/diagbot/nlp/controller/FeatureController.java
  24. 1 1
      nlp-web/src/main/resources/application.yml
  25. 4 4
      nlp/src/main/java/org/diagbot/nlp/feature/FeatureAnalyze.java
  26. 6 13
      nlp/src/main/java/org/diagbot/nlp/feature/extract/CaseToken.java
  27. 3 1
      nlp/src/main/java/org/diagbot/nlp/feature/extract/CaseTokenFeature.java
  28. 7 1
      nlp/src/main/java/org/diagbot/nlp/util/NegativeEnum.java
  29. 6 2
      nlp/src/main/resources/nlp.properties
  30. 11 25
      push-web/src/main/java/org/diagbot/push/controller/AlgorithmController.java
  31. 2 2
      push-web/src/main/resources/static/pages/algorithm/list.html
  32. 461 0
      push-web/src/main/resources/static/pages/eyehospital/list.html

+ 16 - 18
algorithm/src/main/java/org/algorithm/core/neural/TensorFlowModelLoadFactory.java

@@ -6,6 +6,7 @@ import org.diagbot.pub.utils.PropertiesUtil;
 
 /**
 * Tensorflow 模型加载工厂
+ *
  * @Author: bijl
  * @Date: 2018年7月19日-下午7:28:58
  * @Description:
@@ -14,32 +15,31 @@ public class TensorFlowModelLoadFactory {
 
     /**
      * 加载并创建模型类
-     * @param modelVersion  模型版本号
+     *
+     * @param modelVersion 模型版本号
      * @return 模型
      */
     public static TensorflowModel create(String modelVersion) {
-        
-        
+
+
         PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
-        
-        String inputOpName = "X";  // 统一输入op名称
-        String outputOpName = "softmax/softmax";  // 统一输出op名称
-        
+
+
 //        NNDataSet dataSet = new NNDataSetImplNonParallel(modelVersion);  // 新模型
         NNDataSet dataSet = new NNDataSetImpl(modelVersion);  // 老模型
 
-        String modelPath =prop.getProperty("basicPath");  // 模型基本路径
+        String modelPath = prop.getProperty("basicPath");  // 模型基本路径
         modelVersion = prop.getProperty(modelVersion);
         modelPath = modelPath.replace("model_version_replacement", modelVersion);  // 生成模型路径
-        
-        TensorflowModel tm = new TensorflowModel(modelPath, inputOpName, outputOpName,
-                dataSet);
+
+        TensorflowModel tm = new TensorflowModel(modelPath, dataSet);
         return tm;
     }
 
     /**
      * 加载并创建模型类
-     * @param modelVersion  模型版本号
+     *
+     * @param modelVersion 模型版本号
      * @return 模型
      */
     public static TensorflowModel createAndFilterDiagnosis(String modelVersion) {
@@ -47,20 +47,18 @@ public class TensorFlowModelLoadFactory {
 
         PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
 
-        String inputOpName = "X";  // 统一输入op名称
-        String outputOpName = "softmax/softmax";  // 统一输出op名称
-
         NNDataSet dataSet = new NNDataSetImpl(modelVersion);  // 老模型
 
         dataSet.setDoFilterDiagnosis(true);
         dataSet.readFilterDiagnosisDict();
+        dataSet.setWithSequenceInputs(true);  // 使用序列输入
+        dataSet.readChar2IdDict(modelVersion);  // 读取字符字典
 
-        String modelPath =prop.getProperty("basicPath");  // 模型基本路径
+        String modelPath = prop.getProperty("basicPath");  // 模型基本路径
         modelVersion = prop.getProperty(modelVersion);
         modelPath = modelPath.replace("model_version_replacement", modelVersion);  // 生成模型路径
 
-        TensorflowModel tm = new TensorflowModel(modelPath, inputOpName, outputOpName,
-                dataSet);
+        TensorflowModel tm = new TensorflowModel(modelPath, dataSet);
         return tm;
     }
 

+ 94 - 36
algorithm/src/main/java/org/algorithm/core/neural/TensorflowModel.java

@@ -6,43 +6,55 @@ import org.tensorflow.Session;
 import org.tensorflow.Tensor;
 
 import java.nio.FloatBuffer;
+import java.nio.IntBuffer;
+import java.util.HashMap;
 import java.util.Map;
 
 /**
  * tensorflow 模型类,要求单个样本是1维向量,而不是高维向量
+ *
  * @Author: bijl
  * @Date: 2018年7月19日-下午7:21:24
  * @Description:
  */
 public class TensorflowModel {
-    
-    private final String INPUT_OPERATION_NAME;   // 输入op的名称
-    private final String OUTPUT_OPERATION_NAME;  // 输出op的名称
+
+
+    private final String X = "X";  // 输入op x的名字
+    private final String Char_ids = "Char_ids";  // 输入op Char_ids的名字
+    private final String Pos_ids = "Pos_ids";  // 输入op Pos_ids的名字
+    private final String SOFT_MAX = "softmax/softmax";  // 输出op的名称
+
     private final int NUM_FEATURE;  // 特征个数
     private final int NUM_LABEL;  //  标签(类别)个数
     private SavedModelBundle bundle; // 模型捆绑
     private Session session;  // 会话
     private NNDataSet dataSet;  // 数据集
-    
+
+
+    private boolean withSequenceInputs = false;  // 是否带有序列输入
+    private final int MAX_LEN; // 最大长度
+
+
     /**
-     * 
-     * @param exportDir  模型保存地址
-     * @param inputOpName  输入op的名称
-     * @param outputOpName  输出op的名称
-     * @param dataSet  模型使用的数据集
+     * @param exportDir 模型保存地址
+     * @param dataSet   模型使用的数据集
      */
-    public TensorflowModel(String exportDir, String inputOpName, String outputOpName, NNDataSet dataSet) {
-        this.INPUT_OPERATION_NAME = inputOpName;
-        this.OUTPUT_OPERATION_NAME = outputOpName;
+    public TensorflowModel(String exportDir, NNDataSet dataSet) {
+
+        this.init(exportDir);
         this.dataSet = dataSet;
         this.NUM_FEATURE = this.dataSet.getNumFeature();
         this.NUM_LABEL = this.dataSet.getNumLabel();
-        this.init(exportDir);
-                
+
+        // 序列数据有段的属性
+        this.MAX_LEN = this.dataSet.getMAX_LEN();
+        this.withSequenceInputs = this.dataSet.isWithSequenceInputs();
     }
-    
+
     /**
      * 初始化:加载模型,获取会话。
+     *
      * @param exportDir
      */
     public void init(String exportDir) {
@@ -54,29 +66,67 @@ public class TensorflowModel {
         }
 
         // create the session from the Bundle
-        this.session = bundle.session(); 
+        this.session = bundle.session();
+    }
+
+
+    /**
+     * 包装序列化输入
+     *
+     * @param sequenceValuesMap 序列输入的map
+     * @param numExamples       样本数
+     * @return
+     */
+    private Map<String, Tensor<Integer>> wrapSequenceInputs(Map<String, int[]> sequenceValuesMap, int numExamples) {
+        long[] inputShape = {numExamples, this.MAX_LEN};
+        Map<String, Tensor<Integer>> sequenceTensorMap = new HashMap<>();
+        for (Map.Entry<String, int[]> entry : sequenceValuesMap.entrySet()) {
+            String mapKey = entry.getKey();
+            Tensor<Integer> inputTensor = Tensor.create(
+                    inputShape,
+                    IntBuffer.wrap(entry.getValue())
+            );
+            sequenceTensorMap.put(mapKey, inputTensor);
+        }
+
+        return sequenceTensorMap;
     }
-    
+
+
     /**
      * 运行模型
-     * @param inputValues  输入值
-     * @param numExamples  样本个数
+     *
+     * @param inputValues 输入值
+     * @param numExamples 样本个数
      * @return 模型的输出
      */
-    private float[][] run(float[] inputValues, int numExamples){
-//        long[] inputShape = {numExamples, this.NUM_FEATURE, 4, 1};  // 新模型
-        long[] inputShape = {numExamples, this.NUM_FEATURE};  // 老模型
+    private float[][] run(float[] inputValues, Map<String, int[]> sequenceValues, int numExamples) {
+        long[] inputShape = {numExamples, this.NUM_FEATURE};
         Tensor<Float> inputTensor = Tensor.create(
-                inputShape,  
-                FloatBuffer.wrap(inputValues) 
+                inputShape,
+                FloatBuffer.wrap(inputValues)
         );
-        return this.session.runner().feed(this.INPUT_OPERATION_NAME, inputTensor)
-                .feed("keep_prob", Tensor.create(1.0f, Float.class))  // dropout保留率
-                .fetch(this.OUTPUT_OPERATION_NAME).run().get(0)
-                .copyTo(new float[numExamples][this.NUM_LABEL]);
+
+        // 序列数据
+        if (this.withSequenceInputs){
+            Map<String, Tensor<Integer>> sequenceTensorMap = this.wrapSequenceInputs(sequenceValues, numExamples);
+            this.session.runner();
+
+            return this.session.runner().feed(this.X, inputTensor)
+                    .feed(this.Char_ids, sequenceTensorMap.get(this.Char_ids))
+                    .feed(this.Pos_ids, sequenceTensorMap.get(this.Pos_ids))
+                    .feed("keep_prob", Tensor.create(1.0f, Float.class))  // dropout保留率
+                    .fetch(this.SOFT_MAX).run().get(0)
+                    .copyTo(new float[numExamples][this.NUM_LABEL]);
+        }else{
+            return this.session.runner().feed(this.X, inputTensor)
+                    .feed("keep_prob", Tensor.create(1.0f, Float.class))  // dropout保留率
+                    .fetch(this.SOFT_MAX).run().get(0)
+                    .copyTo(new float[numExamples][this.NUM_LABEL]);
+        }
     }
-    
-    
+
+
     /**
      * 运行模型,并将结果打包成目标格式
      */
@@ -85,14 +135,22 @@ public class TensorflowModel {
         float sum = 0;
         for (float f : inputValues)
             sum += f;
-        if(sum == 0)  // 如果输入没有有效特征,则直接返回null
+        if (sum == 0)  // 如果输入没有有效特征,则直接返回null
             return null;
-        
-        float[][] predict = this.run(inputValues, 1);  // 一次一个样本
-        return this.dataSet.wrap(predict);  
+
+        Map<String, int[]> sequenceValues = null;
+        if (this.withSequenceInputs){
+            sequenceValues = new HashMap<>();
+            sequenceValues.put(this.Char_ids, this.dataSet.toCharIds(inputs));
+            sequenceValues.put(this.Pos_ids, this.dataSet.toPosIds(inputs));
+        }
+
+
+        float[][] predict = this.run(inputValues, sequenceValues, 1);  // 一次一个样本
+        return this.dataSet.wrap(predict);
     }
-    
-    
+
+
     /**
      * 关闭会话,释放资源
      */

+ 202 - 17
algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSet.java

@@ -21,12 +21,27 @@ public abstract class NNDataSet {
 
     protected final Map<String, Integer> LABEL_DICT = new HashMap<>();
     protected final Map<String, Integer> NEGATIVE_DICT = new HashMap<>();
-    protected final Map<String, String> RE_SPLIT_WORD_DICT = new HashMap<>();
-    protected final Map<String, Map<String, Integer>> RELATED_DIAGNOSIS_DICT = new HashMap<>();
-    protected final List<String> FEATURE_NAME_STORE = new ArrayList<>();
+
     private final String[] FEATURE_DICT_ARRAY;
     private final String[] LABEL_DICT_ARRAY;
-    private boolean doFilterDiagnosis = false;
+
+    // 再分词和疾病过滤相关容器
+    protected final Map<String, String> RE_SPLIT_WORD_DICT = new HashMap<>();  // 在分词表
+    protected final List<String> FEATURE_NAME_STORE = new ArrayList<>();  // 特征保存
+    protected final Map<String, Map<String, Integer>> RELATED_DIAGNOSIS_DICT = new HashMap<>();  // 特征与疾病相关表
+    private boolean doFilterDiagnosis = false;  // 是否做疾病过滤
+
+    private final float firstRateThreshold = 0.1f;  // 第一个疾病的概率阈值
+    private final float lastRateThreshold = 0.005f;  // 最后一个概率阈值
+    private final float rateSumThreshold = 0.6f;  // 概率和阈值
+    private final int numToPush = 3;  // 推荐推送的个数
+    private final float rapidFallTimes = 5;  // 骤降倍数
+
+    // 序列数据
+    private final int MAX_LEN = 257;
+    private boolean withSequenceInputs = false;  // 是否带有序列输入
+    protected final Map<String, Integer> CHAR2ID_DICT = new HashMap<>();
+
 
     public NNDataSet(String modelAndVersion) {
         this.readDict(modelAndVersion);
@@ -36,6 +51,8 @@ public abstract class NNDataSet {
         this.LABEL_DICT_ARRAY = new String[this.NUM_LABEL];
         this.makeDictArr();
         this.readReSplitWordDict();
+
+
     }
 
     /**
@@ -46,11 +63,33 @@ public abstract class NNDataSet {
      */
     public abstract float[] toFeatureVector(Map<String, Map<String, String>> inputs);
 
+    /**
+     * 装外部输入转为字符ids
+     *
+     * @param inputs
+     * @return
+     */
+    public abstract int[] toCharIds(Map<String, Map<String, String>> inputs);
+
+    /**
+     * 装外部输入转为位置ids
+     *
+     * @param inputs
+     * @return
+     */
+    public abstract int[] toPosIds(Map<String, Map<String, String>> inputs);
+
     /**
      * 读取特征和类别字典
      */
     public abstract void readDict(String modelAndVersion);
 
+
+    /**
+     * 读取特征和类别字典
+     */
+    public abstract void readChar2IdDict(String modelAndVersion);
+
     /**
      * 读取再分词字典
      */
@@ -74,36 +113,160 @@ public abstract class NNDataSet {
     }
 
     /**
-     * 打包特征名和概率 + 过滤疾病
-     * 基本操作,过滤前20个疾病,如果
+     * 推送个数过滤[无效病历]
+     * 规则:最大概率疾病的概率要超过给定阈值,如果不超过,则认为疾病不收敛,不予推送
+     *
+     * @param nameAndValueListSorted
+     */
+    private void pushCountFilterBefore(List<NameAndValue> nameAndValueListSorted) {
+        if (nameAndValueListSorted.get(0).getValue() < this.firstRateThreshold)
+            nameAndValueListSorted.clear();
+    }
+
+    /**
+     * 推送个数过滤[概率和和概率骤降过滤]
+     * 规则:
+     * 1- 为了防止一棍子打死,我们还是尽量要推送3个病历的,除非概率骤降。
+     * 2- 概率骤降过滤,当病历收敛到一个或几个疾病之后,再出现的疾病,概率会骤然下降很多倍
+     * ,这时,这个疾病差不多是随机推送的,因此要过滤掉。【都要做】
+     * 2- 概率和,就是概率和不超过某个阈值【只有在剩余疾病个数超过阈值时做】
+     *
+     * @param nameAndValueListSorted
+     */
+    private void pushCountFilterAfter(List<NameAndValue> nameAndValueListSorted) {
+
+        // 如果不超过尽量推送的个数,只做概率骤降判断
+        Iterator<NameAndValue> it = nameAndValueListSorted.iterator();
+        boolean deleteTheRest = false;   // 是否删除剩余的疾病
+        float preRate = 0.0f; // 前一个疾病的概率
+        int restCnt = 0;  // 剩余疾病数
+        float rateSum = 0.0f;  // 概率和
+
+        while (it.hasNext()) {
+//            NameAndValue nameAndValue = it.next();
+//            if (!deleteTheRest) {
+//                // 相对于前一个疾病概率骤降rapidFallTimes倍
+//                if (preRate / nameAndValue.getValue() >= this.rapidFallTimes)
+//                    deleteTheRest = true;
+//                else {
+//                    rateSum += nameAndValue.getValue();
+//                    preRate = nameAndValue.getValue();
+//                    restCnt += 1;
+//                }
+//            }
+//
+//            if (deleteTheRest)  // 删除剩下的疾病
+//                it.remove();
+//
+//
+//            if (!deleteTheRest && restCnt >= this.numToPush) {
+//
+//                // 如果超过尽量推送的个数,那么做概率和阈值过滤【从下一个开始删除】
+//                if (rateSum >= this.rateSumThreshold)
+//                    deleteTheRest = true;
+//            }
+
+            NameAndValue nameAndValue = it.next();
+            if (!deleteTheRest) {
+                // 最后一个必须大于某个阈值
+                if (nameAndValue.getValue() < this.lastRateThreshold)
+                    deleteTheRest = true;
+            }
+
+            if (deleteTheRest)  // 删除剩下的疾病
+                it.remove();
+
+        }
+
+    }
+
+    /**
+     * 打包特征名和概率 + 过滤疾病 + 推送个数选择
+     * 基本操作,过滤前20个疾病,如果有疾病留下,否则前50个疾病
      *
      * @param predict 模型输出
      * @return
      */
-    public Map<String, Float> wrapAndFilter(float[][] predict) {
+    public Map<String, Float> wrapAndFilterWithPushCountFilter(float[][] predict) {
         List<NameAndValue> nameAndValueList = new ArrayList<>();
         for (int i = 0; i < predict[0].length; i++)
             nameAndValueList.add(new NameAndValue(this.LABEL_DICT_ARRAY[i], predict[0][i]));
         nameAndValueList.sort(Comparator.reverseOrder());  // 按概率从大到小排列
 
+//        System.out.println("原来__推送:...............................................................");
+//        System.out.println(nameAndValueList.subList(0, 10));
+
+        pushCountFilterBefore(nameAndValueList);  // 推送个数过滤【无效病历过滤】
+
+//        nameAndValueList = filterDiagnosis(nameAndValueList);  // 疾病过滤
+
+        this.pushCountFilterAfter(nameAndValueList);  // 推送个数过滤【概率骤降和概率和阈值过滤】
+
+//        System.out.println("新版本__最终__推送:.......................................................");
+//        System.out.println("长度:" + nameAndValueList.size());
+//        System.out.println(nameAndValueList);
+
         Map<String, Float> result = new HashMap<>();
+        for (NameAndValue nameAndValue : nameAndValueList)
+            result.put(nameAndValue.getName(), nameAndValue.getValue());
+
+        return result;
+    }
+
+    /**
+     * 疾病过滤
+     * 基本规则:
+     * 如果没有一个特征与该疾病共现过,那么删除该疾病
+     *
+     * @param nameAndValueListSorted
+     * @return
+     */
+    public List<NameAndValue> filterDiagnosis(List<NameAndValue> nameAndValueListSorted) {
         Integer cnt = 0;
         String diagnosis;
         NameAndValue nameAndValue;
         Map<String, Integer> relatedDiagnoses = null;
-        for (int i = 0; i < nameAndValueList.size(); i++) {
-            nameAndValue = nameAndValueList.get(i);
+        List<NameAndValue> candidateNameAndValues = new ArrayList<>();
+        for (int i = 0; i < nameAndValueListSorted.size(); i++) {
+            nameAndValue = nameAndValueListSorted.get(i);
             diagnosis = nameAndValue.getName();
+
             for (String featureName : this.FEATURE_NAME_STORE) {
                 relatedDiagnoses = this.RELATED_DIAGNOSIS_DICT.get(featureName);
                 if (relatedDiagnoses != null && relatedDiagnoses.get(diagnosis) != null) {
-                    result.put(nameAndValue.getName(), nameAndValue.getValue());
+                    candidateNameAndValues.add(nameAndValue);
                     cnt += 1;
+                    break;  // 有一个共现即可
                 }
             }
             if ((i >= 20 || i >= 50) && cnt > 0)  // 如果前20或50个推送中有相关的疾病,只过滤他们
                 break;
         }
+        return candidateNameAndValues;
+    }
+
+    /**
+     * 打包特征名和概率 + 过滤疾病
+     * 基本操作,过滤前20个疾病,如果
+     *
+     * @param predict 模型输出
+     * @return
+     */
+    public Map<String, Float> wrapAndFilter(float[][] predict) {
+        List<NameAndValue> nameAndValueList = new ArrayList<>();
+        for (int i = 0; i < predict[0].length; i++)
+            nameAndValueList.add(new NameAndValue(this.LABEL_DICT_ARRAY[i], predict[0][i]));
+        nameAndValueList.sort(Comparator.reverseOrder());  // 按概率从大到小排列
+
+        nameAndValueList = filterDiagnosis(nameAndValueList);  // 疾病过滤
+
+//        System.out.println("原版本__最终__推送 ......................................................");
+//        System.out.println("长度:" + nameAndValueList.size());
+//        System.out.println(nameAndValueList);
+
+        Map<String, Float> result = new HashMap<>();
+        for (NameAndValue nameAndValue : nameAndValueList)
+            result.put(nameAndValue.getName(), nameAndValue.getValue());
         return result;
     }
 
@@ -137,6 +300,14 @@ public abstract class NNDataSet {
         public String getName() {
             return name;
         }
+
+        @Override
+        public String toString() {
+            return "NameAndValue{" +
+                    "name='" + name + '\'' +
+                    ", value=" + value +
+                    '}';
+        }
     }
 
     /**
@@ -147,8 +318,9 @@ public abstract class NNDataSet {
      */
     public Map<String, Float> wrap(float[][] predict) {
         if (this.doFilterDiagnosis)  // 过滤疾病
-            return this.wrapAndFilter(predict);
-        else
+        {
+            return this.wrapAndFilterWithPushCountFilter(predict);
+        } else
             return this.basicWrap(predict);
     }
 
@@ -175,17 +347,16 @@ public abstract class NNDataSet {
     }
 
     /**
-     *  存储特征名称
+     * 存储特征名称
+     *
      * @param features
      */
-    public void storeFeatureNames(Map<String, Map<String, String>> features){
+    public void storeFeatureNames(Map<String, Map<String, String>> features) {
+        this.FEATURE_NAME_STORE.size();  // this.FEATURE_NAME_STORE.clear() 未知原因会出现数据越界异常,加了这个则没有了
         this.FEATURE_NAME_STORE.clear();
         this.FEATURE_NAME_STORE.addAll(features.keySet());
     }
 
-    /**
-     * @return
-     */
     public int getNumLabel() {
         return this.NUM_LABEL;
     }
@@ -195,4 +366,18 @@ public abstract class NNDataSet {
         this.doFilterDiagnosis = doFilterDiagnosis;
     }
 
+
+    public int getMAX_LEN() {
+        return MAX_LEN;
+    }
+
+
+    public void setWithSequenceInputs(boolean withSequenceInputs) {
+        this.withSequenceInputs = withSequenceInputs;
+    }
+
+
+    public boolean isWithSequenceInputs() {
+        return withSequenceInputs;
+    }
 }

+ 78 - 7
algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSetImpl.java

@@ -3,10 +3,10 @@ package org.algorithm.core.neural.dataset;
 import org.algorithm.util.TextFileReader;
 import org.diagbot.pub.utils.PropertiesUtil;
 
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.*;
 import java.util.Map.Entry;
 
 /**
@@ -66,6 +66,42 @@ public class NNDataSetImpl extends NNDataSet {
         return featureVector;
     }
 
+    @Override
+    public int[] toCharIds(Map<String, Map<String, String>> inputs) {
+        String sentence = inputs.get("sentence").get("sentence");
+        int max_len = this.getMAX_LEN();
+        int[] ids = new int[max_len];
+        char ch = '1';
+        Integer id = null;
+        for (int i = 0; i < sentence.length() && i < max_len; i++) {  // 不超过最大长度
+            ch = sentence.charAt(i);
+            id = this.CHAR2ID_DICT.get(String.valueOf(ch));
+            if (id == null) {
+                id = this.CHAR2ID_DICT.get("<UNC>");
+            }
+            ids[i] = id;
+        }
+        for (int i = sentence.length(); i < max_len; i++)  // padding
+            ids[i] = this.CHAR2ID_DICT.get("<PAD>");
+
+        return ids;
+    }
+
+    @Override
+    public int[] toPosIds(Map<String, Map<String, String>> inputs) {
+        int max_len = this.getMAX_LEN();
+        String sentence = inputs.get("sentence").get("sentence");
+        int[] pos_ids = new int[max_len];
+        for (int j=0; j<max_len; j++)
+            pos_ids[j] = max_len - 1;  // 位置的padding
+
+        // 绝对位置编码
+        for (int i = 0 ; i < (sentence.length() < max_len ? sentence.length() : max_len); i++)
+            pos_ids[i] = i;
+
+        return pos_ids;
+    }
+
     @Override
     public void readDict(String modelAndVersion) {
 
@@ -105,7 +141,42 @@ public class NNDataSetImpl extends NNDataSet {
 
         }
 
-        System.out.println("feature size:" + this.FEATURE_DICT.size());
+//        System.out.println("feature size:" + this.FEATURE_DICT.size());
+
+    }
+
+    @Override
+    public void readChar2IdDict(String modelAndVersion) {
+
+        // 获取文件目录
+        PropertiesUtil prop = new PropertiesUtil("/algorithm.properties");
+        String filePath = prop.getProperty("basicPath");  // 基本目录
+        filePath = filePath.substring(0, filePath.indexOf("model_version_replacement"));
+
+        filePath = filePath + "char2id.bin";  // 字典文件位置
+
+        // 读取以json字符串保存的数据
+        BufferedReader br = null;
+        try {
+            br = new BufferedReader(new FileReader(filePath));  // 读取原始json文件
+            String line = null;
+            String[] pair = null;
+            while ((line = br.readLine()) != null) {
+                line = line.trim();
+                if (line.indexOf("_|_") > -1){
+                    pair = line.split("_\\|_");
+                    this.CHAR2ID_DICT.put(pair[0], Integer.parseInt(pair[1]));
+                }
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            try {
+                br.close();
+            } catch (IOException e) {
+                e.printStackTrace();
+            }
+        }
 
     }
 
@@ -171,7 +242,7 @@ public class NNDataSetImpl extends NNDataSet {
 
         }
 
-        System.out.println("再分词,词条数:" + this.RE_SPLIT_WORD_DICT.size());
+//        System.out.println("再分词,词条数:" + this.RE_SPLIT_WORD_DICT.size());
 
     }
 
@@ -204,7 +275,7 @@ public class NNDataSetImpl extends NNDataSet {
             this.RELATED_DIAGNOSIS_DICT.put(temp[0], diagnosis_map);
         }
 
-        System.out.println("疾病过滤字典大小:" + this.RELATED_DIAGNOSIS_DICT.size());
+//        System.out.println("疾病过滤字典大小:" + this.RELATED_DIAGNOSIS_DICT.size());
     }
 
 

+ 15 - 0
algorithm/src/main/java/org/algorithm/core/neural/dataset/NNDataSetImplNonParallel.java

@@ -132,6 +132,16 @@ public class NNDataSetImplNonParallel extends NNDataSet {
         return results;
     }
 
+    @Override
+    public int[] toCharIds(Map<String, Map<String, String>> inputs) {
+        return new int[0];
+    }
+
+    @Override
+    public int[] toPosIds(Map<String, Map<String, String>> inputs) {
+        return new int[0];
+    }
+
 
     /**
      * 读取字典
@@ -186,4 +196,9 @@ public class NNDataSetImplNonParallel extends NNDataSet {
 
     }
 
+    @Override
+    public void readChar2IdDict(String modelAndVersion) {
+
+    }
+
 }

+ 24 - 44
algorithm/src/main/java/org/algorithm/test/Test.java

@@ -1,56 +1,36 @@
 package org.algorithm.test;
 
-import java.util.*;
-
-public class Test {
 
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
 
+public class Test {
+    
     public static void main(String[] args) {
-        List<Integer> data = new ArrayList<>();
-        data.add(1);
-        data.add(3);
-        data.add(5);
-        data.add(7);
-        Test t = new Test();
-
-        List<List<Integer>> workSpace = new ArrayList<>();
-        for (int i = 1; i < data.size(); i++) {
-            t.combinerSelect(data, new ArrayList<>(), workSpace, data.size(), i);
-        }
-
-        System.out.println(workSpace);
 
-    }
-
-    /**
-     * 组合生成器
-     *
-     * @param data      原始数据
-     * @param workSpace 自定义一个临时空间,用来存储每次符合条件的值
-     * @param k         C(n,k)中的k
-     */
-    public <E> void combinerSelect(List<E> data, List<E> workSpace, List<List<E>> result, int n, int k) {
-        List<E> copyData;
-        List<E> copyWorkSpace = null;
-
-        if (workSpace.size() == k) {
-            for (E c : workSpace)
-                System.out.print(c);
-
-            result.add(new ArrayList<>(workSpace));
-            System.out.println();
+        List<String> aList = new ArrayList<>();
+        aList.add("del");
+        aList.add("del");
+        aList.add("xx");
+        aList.add("yy");
+
+        Iterator<String> it = aList.iterator();
+        boolean xx = false;
+        while(it.hasNext()){
+            String x = it.next();
+            if (!xx){
+
+                if (x.equals("xx"))
+                    xx = true;
+            }
+            if(xx){
+                it.remove();
+            }
         }
 
-        for (int i = 0; i < data.size(); i++) {
-            copyData = new ArrayList<E>(data);
-            copyWorkSpace = new ArrayList<E>(workSpace);
+        System.out.println(aList);
 
-            copyWorkSpace.add(copyData.get(i));
-            for (int j = i; j >= 0; j--)
-                copyData.remove(j);
-            combinerSelect(copyData, copyWorkSpace, result, n, k);
-        }
     }
 
 }
-

+ 1 - 1
algorithm/src/main/resources/algorithm.properties

@@ -2,7 +2,7 @@
 
 #basicPath=E:/project/push/algorithm/src/main/models/model_version_replacement/model
 basicPath=/opt/models/dev/models/model_version_replacement/model
-#basicPath=F:/models/model_version_replacement/model
+#basicPath=E:/models_2019_9_24_16_21_29/model_version_replacement/model
 
 ############################### current model version ################################
 diagnosisPredict.version=outpatient_556_IOE_1

+ 21 - 4
bigdata-web/src/main/java/org/diagbot/bigdata/work/AlgorithmCore.java

@@ -6,6 +6,7 @@ import org.algorithm.util.AlgorithmClassify;
 import org.diagbot.common.push.bean.FeatureRate;
 import org.diagbot.common.push.bean.ResponseData;
 import org.diagbot.common.push.bean.SearchData;
+import org.diagbot.common.push.naivebayes.factory.AlgorithmNaiveBayesFactory;
 import org.diagbot.nlp.feature.FeatureType;
 import org.diagbot.nlp.util.Constants;
 import org.diagbot.nlp.util.NlpCache;
@@ -47,12 +48,28 @@ public class AlgorithmCore {
             if (classifies[i] == null) {
                 continue;
             }
-            //算法推理
-            AlgorithmExecutor executor = AlgorithmFactory.getInstance(classifies[i]);
             Map<String, Float> featuresMap = null;
+            AlgorithmExecutor executor = null;
+            switch (searchData.getAlgorithmType() == null ? 1 : searchData.getAlgorithmType()) {
+                case 1: //机器学习算法推理
+                    executor = AlgorithmFactory.getInstance(classifies[i]);
+                    if (FeatureType.parse(featureTypes[i]) == FeatureType.DIAG && !"2".equals(searchData.getSysCode())) {
+                        bigDataSearchData.setLength(6);//模型推送最多6个比较合理
+                    }
+                    break;
+                case 2: //朴素贝叶斯算法推理
+                    if (FeatureType.parse(featureTypes[i]) == FeatureType.DIAG) {
+                        executor = AlgorithmNaiveBayesFactory.getInstance();
+                    }
+                    break;
+                default:
+                    executor = AlgorithmFactory.getInstance(classifies[i]);
+            }
+
             if (executor != null) {
-                featuresMap = executor.execute(bigDataSearchData.getInputs());
-                ;
+                featuresMap = executor.execute(bigDataSearchData.getInputs());;
+            } else {
+                continue;
             }
             List<Map.Entry<String, Float>> featuresOrderList = null;
             if (featuresMap == null) {

+ 1 - 1
bigdata-web/src/main/java/org/diagbot/bigdata/work/ResultDataProxy.java

@@ -180,7 +180,7 @@ public class ResultDataProxy {
                 ResultMappingFilter filter = filterMap.get(featureRate.getFeatureName());
                 if (filter != null) {
                     if (filter.getSex() != null && !StringUtils.isEmpty(searchData.getSex())
-                            && !filter.getSex().equals(searchData.getSex())) {      //性别过滤
+                            && filter.getSex().equals(searchData.getSex()) || filter.getSex().equals("3")) {      //性别过滤
                         isFirst = true;
                     } else {
                         isFirst = false;

+ 63 - 0
common-push/src/main/java/org/diagbot/common/push/bean/RelevantFeature.java

@@ -0,0 +1,63 @@
+package org.diagbot.common.push.bean;
+
+/**
+ * @Description:
+ * @author: wangyu
+ * @time: 2019/10/14 16:36
+ */
/**
 * Row object for one record of the {@code doc_relevant_feature} comparison table,
 * loaded from the {@code bigdata_diagnose_feature_filter.dict} cache file.
 * Field names mirror the table's column names (hence the snake_case accessors).
 */
public class RelevantFeature {
    // table primary key
    private String id;
    // diagnosis name the feature is associated with
    private String diagnose;
    // feature (symptom/sign/...) text
    private String feature;
    // feature category code, as stored in the table
    private String feature_type;
    // value category code, as stored in the table
    private String value_type;
    // flag/text used when looking up suspect diagnoses
    private String find_suspect_diagnose;

    public String getId() { return id; }

    public void setId(String id) { this.id = id; }

    public String getDiagnose() { return diagnose; }

    public void setDiagnose(String diagnose) { this.diagnose = diagnose; }

    public String getFeature() { return feature; }

    public void setFeature(String feature) { this.feature = feature; }

    public String getFeature_type() { return feature_type; }

    public void setFeature_type(String feature_type) { this.feature_type = feature_type; }

    public String getValue_type() { return value_type; }

    public void setValue_type(String value_type) { this.value_type = value_type; }

    public String getFind_suspect_diagnose() { return find_suspect_diagnose; }

    public void setFind_suspect_diagnose(String find_suspect_diagnose) { this.find_suspect_diagnose = find_suspect_diagnose; }
}

+ 10 - 0
common-push/src/main/java/org/diagbot/common/push/bean/SearchData.java

@@ -60,6 +60,8 @@ public class SearchData {
     private Map<String, Map<String, String>> filters = new HashMap<>(10, 0.8f);
     //满足规则的ID集合
     private Map<String, List<Rule>> rules = new HashMap<>();
+    //特征推送走的模型 1:机器学习 2:朴素贝叶斯
+    private Integer algorithmType;
 
     public Integer getDisType() {
         return disType;
@@ -315,6 +317,14 @@ public class SearchData {
         this.rules = rules;
     }
 
+    public Integer getAlgorithmType() {
+        return algorithmType;
+    }
+
+    public void setAlgorithmType(Integer algorithmType) {
+        this.algorithmType = algorithmType;
+    }
+
     public String getDiseaseName() {
         return diseaseName;
     }

+ 155 - 0
common-push/src/main/java/org/diagbot/common/push/cache/ApplicationCacheUtil.java

@@ -1,5 +1,6 @@
 package org.diagbot.common.push.cache;
 
+import org.diagbot.common.push.bean.RelevantFeature;
 import org.diagbot.common.push.bean.ResultMappingFilter;
 import org.diagbot.common.push.bean.Rule;
 import org.diagbot.common.push.bean.RuleApp;
@@ -30,6 +31,12 @@ public class ApplicationCacheUtil {
     public static Map<String, RuleApp> kl_rule_app_filter_map = null;
     //pacs关系抽取过滤
     public static Map<String, Map<String, String>> kl_diagnose_detail_filter_map = null;
+    //朴素贝叶斯
+    public static Map<String, Map<String, Float>> doc_feature_naivebayes_prob_map = null;
+    //朴素贝叶斯规则过滤
+    public static Map<String, Map<String, Float>> relevant_feature_bayes_map = null;
+    //体征过滤对比表信息
+    public static Map<String, RelevantFeature> relevant_feature_map = null;
 
     public static Map<String, Map<String, String>> getStandard_info_synonym_map() {
         if (standard_info_synonym_map == null) {
@@ -237,4 +244,152 @@ public class ApplicationCacheUtil {
             }
         }
     }
+
+    public static Map<String, Map<String, Float>> getDoc_feature_naivebayes_prob_map() {
+        if (doc_feature_naivebayes_prob_map == null) {
+            create_doc_feature_naivebayes_prob_map();
+        }
+        return doc_feature_naivebayes_prob_map;
+    }
+
    /**
     * Builds {@code doc_feature_naivebayes_prob_map} from the two cache files written by
     * CacheFileManager: for every diagnosis the resulting inner map holds its prior
     * probability under the sentinel key {@code "priorProb"} and the conditional
     * probability of each feature co-occurring with that diagnosis.
     * NOTE(review): a real feature literally named "diagnoseCount" or "priorProb"
     * would collide with the sentinel keys used below — presumably that never occurs
     * in the dictionaries; verify against the source data.
     */
    public static void create_doc_feature_naivebayes_prob_map() {
        doc_feature_naivebayes_prob_map = new HashMap<>();
        // <rdn, [feature...]> feature list extracted for each medical record (rdn)
        Map<String, List<String>> featureMap = new HashMap<>();
        List<String> featureList = null;
        Configuration configuration = new DefaultConfig();
        List<String> fileFeatureContents = configuration.readFileContents("bigdata_naivebayes_features.dict");
        for (String line : fileFeatureContents) {
            // line format: rdn|feature1 feature2 ... (first occurrence of an rdn wins)
            String[] content = line.split("\\|", -1);
            if (featureMap.get(content[0]) == null) {
                featureList = new ArrayList<>();
                for (String feature : content[1].split(" ")) {
                    featureList.add(feature);
                }
                featureMap.put(content[0], featureList);
            }
        }

        // <rdn, diagnose> diagnosis attached to each medical record
        Map<String, String> diagnoseMap = new HashMap<>();
        // <diagnose, count> number of records per diagnosis; the sentinel entry
        // "diagnoseCount" holds the total number of records
        Map<String, Integer> diagnoseCount = new HashMap<>();
        List<String> fileDiagnoseContents = configuration.readFileContents("bigdata_naivebayes_diagnose.dict");
        diagnoseCount.put("diagnoseCount", fileDiagnoseContents.size());
        for (String line : fileDiagnoseContents) {
            String[] content = line.split("\\|", -1);
            if (diagnoseMap.get(content[0]) == null) {
                diagnoseMap.put(content[0], content[1]);
            }
            if (diagnoseCount.get(content[1]) == null) {
                diagnoseCount.put(content[1], 1);
            } else {
                diagnoseCount.put(content[1], diagnoseCount.get(content[1]) + 1);
            }
        }

        // <diagnose, <feature, co-occurrence count>> built by joining the two maps on rdn
        Map<String, Map<String, Integer>> diagnose2featureCount = new HashMap<>();
        Map<String, Integer> featureCount = new HashMap<>();
        for (Map.Entry<String, String> diagnoseMapEntry : diagnoseMap.entrySet()) {
            // skip records that have a diagnosis but no extracted features
            if (featureMap.get(diagnoseMapEntry.getKey()) == null) {
                continue;
            }
            for (String feature : featureMap.get(diagnoseMapEntry.getKey())) {
                /**
                 diagnoseMapEntry is <rdn, diagnose>, e.g. <1596386_9, rhinitis>.
                 First time this diagnosis is seen: start a fresh per-feature counter.
                 */
                if (diagnose2featureCount.get(diagnoseMapEntry.getValue()) == null) {
                    featureCount = new HashMap<>();
                    // NOTE(review): featureCount is freshly created here, so this get()
                    // is always null and the else branch is unreachable in this scope
                    if (featureCount.get(feature) == null) {
                        featureCount.put(feature, 1);
                    } else {
                        featureCount.put(feature, featureCount.get(feature) + 1);
                    }
                    // sentinel: number of records carrying this diagnosis
                    featureCount.put("diagnoseCount", diagnoseCount.get(diagnoseMapEntry.getValue()));
                    diagnose2featureCount.put(diagnoseMapEntry.getValue(), featureCount);
                } else {
                    if (diagnose2featureCount.get(diagnoseMapEntry.getValue()).get(feature) == null) {
                        diagnose2featureCount.get(diagnoseMapEntry.getValue()).put(feature, 1);
                    } else {
                        diagnose2featureCount.get(diagnoseMapEntry.getValue())
                                .put(feature, diagnose2featureCount.get(diagnoseMapEntry.getValue()).get(feature) + 1);
                    }
                }
            }
        }

        Map<String, Float> prob = null;
        for (Map.Entry<String, Map<String, Integer>> diagnose2featureCountEntry : diagnose2featureCount.entrySet()) {
            prob = new HashMap<>();
            // prior probability: records with this diagnosis / all records
            float priorProb = (float) diagnose2featureCountEntry.getValue().get("diagnoseCount") / diagnoseCount.get("diagnoseCount");
            prob.put("priorProb", priorProb);
            // conditional probability per feature: co-occurrences / records with this
            // diagnosis (the "diagnoseCount" sentinel flows through unchanged as count/count = 1)
            for (Map.Entry<String, Integer> featuresCount : diagnose2featureCountEntry.getValue().entrySet()) {
                float conditionProb = (float) featuresCount.getValue() / diagnose2featureCountEntry.getValue().get("diagnoseCount");
                prob.put(featuresCount.getKey(), conditionProb);
            }
            doc_feature_naivebayes_prob_map.put(diagnose2featureCountEntry.getKey(), prob);
        }
    }
+
+    public static Map<String, Map<String,Float>> getRelevant_feature_map() {
+        if (relevant_feature_bayes_map == null) {
+            createRelevant_feature_map();
+        }
+        return relevant_feature_bayes_map;
+    }
+
+    public static Map<String, Map<String,Float>> createRelevant_feature_map() {
+        relevant_feature_bayes_map = new HashMap<>();
+        Map<String,Float> relevantFeatureProb = null;
+        Configuration configuration = new DefaultConfig();
+        List<String> relevantFeatureList = configuration.readFileContents("bigdata_relevant_feature.dict");
+        for (String relevantFeature:relevantFeatureList) {
+            String[] content = relevantFeature.split("\\|", -1);
+            if (relevant_feature_bayes_map.get(content[0]) == null){
+                relevantFeatureProb = new HashMap<>();
+                relevantFeatureProb.put(content[1],0.00f);
+                relevant_feature_bayes_map.put(content[0],relevantFeatureProb);
+            } else {
+                relevant_feature_bayes_map.get(content[0]).put(content[1],0.00f);
+            }
+        }
+        return relevant_feature_bayes_map;
+    }
+
+    /**
+     * 体征过滤获取对比表信息
+     *
+     * @return
+     */
+    public static Map<String, RelevantFeature> get_relevant_feature() {
+        if (relevant_feature_map == null) {
+            create_get_relevant_feature();
+        }
+        return relevant_feature_map;
+    }
+
+    public static void create_get_relevant_feature(){
+        relevant_feature_map = new HashMap<>();
+        Configuration configuration = new DefaultConfig();
+        List<String> fileContents = configuration.readFileContents("bigdata_diagnose_feature_filter.dict");
+        for (String line : fileContents) {
+            String[] content = line.split("\\|", -1);
+            RelevantFeature relevantFeature = new RelevantFeature();
+            if (content.length == 6) {
+                relevantFeature.setId(content[0] == null ? "" : content[0]);
+                relevantFeature.setDiagnose(content[1] == null ? "" : content[1]);
+                relevantFeature.setFeature(content[2] == null ? "" : content[2]);
+                relevantFeature.setFeature_type(content[3] == null ? "" : content[3]);
+                relevantFeature.setFind_suspect_diagnose(content[4] == null ? "" : content[4]);
+                relevantFeature.setValue_type(content[5] == null ? "" : content[5]);
+                relevant_feature_map.put(relevantFeature.getDiagnose(),relevantFeature);
+            }
+        }
+    }
 }

+ 64 - 4
common-push/src/main/java/org/diagbot/common/push/cache/CacheFileManager.java

@@ -411,11 +411,47 @@ public class CacheFileManager {
             }
             fw.close();
 
+            sql = "SELECT rdn, GROUP_CONCAT(feature_name ORDER BY sn SEPARATOR ' ') AS features FROM doc_feature WHERE feature_type = 9 GROUP BY rdn;";
+            st = conn.createStatement();
+            rs = st.executeQuery(sql);
+            fw = new FileWriter(path + "bigdata_naivebayes_features.dict");
+            while (rs.next()) {
+                r1 = rs.getString(1);
+                r2 = rs.getString(2);
+                fw.write(encrypDES.encrytor(r1+ "|" + r2));
+                fw.write("\n");
+            }
+            fw.close();
+
+            sql = "select rdn, feature_name as diagnose from doc_feature where feature_type=2";
+            st = conn.createStatement();
+            rs = st.executeQuery(sql);
+            fw = new FileWriter(path + "bigdata_naivebayes_diagnose.dict");
+            while (rs.next()) {
+                r1 = rs.getString(1);
+                r2 = rs.getString(2);
+                fw.write(encrypDES.encrytor(r1+ "|" + r2));
+                fw.write("\n");
+            }
+            fw.close();
+
+            sql = "SELECT diagnose,feature FROM doc_relevant_feature;";
+            st = conn.createStatement();
+            rs = st.executeQuery(sql);
+            fw = new FileWriter(path + "bigdata_relevant_feature.dict");
+            while (rs.next()) {
+                r1 = rs.getString(1);
+                r2 = rs.getString(2);
+                fw.write(encrypDES.encrytor(r1+ "|" + r2));
+                fw.write("\n");
+            }
+            fw.close();
+
             //化验辅检体征性别年龄
-            sql = "SELECT k1.lib_name, k1.lib_type, kcc.sex_type, kcc.min_age, kcc.max_age " +
-                    "FROM kl_concept_common kcc, kl_concept k1 " +
-                    "where kcc.concept_id = k1.id " +
-                    "and k1.lib_type in (1, 18,12,16,33,35)";
+            sql = "SELECT k1.lib_name, k1.lib_type, IFNULL(kcc.sex_type,3) sex_type, IFNULL(kcc.min_age, 0) min_age, IFNULL(kcc.max_age,200)  max_age\n" +
+                    "from kl_concept k1 LEFT JOIN kl_concept_common kcc on kcc.concept_id = k1.id \n" +
+                    "where  k1.lib_type in (1, 18,12,16,33,35)\n" +
+                    "AND k1.is_deleted = 'N'\n";
             st = conn.createStatement();
             rs = st.executeQuery(sql);
             fw = new FileWriter(path + "bigdata_lpv_sex_age_filter.dict");//化验辅检体征相关文件
@@ -442,6 +478,30 @@ public class CacheFileManager {
             }
             fw.close();
 
+            //特征提取过滤参照表信息
+            sql = "SELECT id,diagnose,feature,feature_type,value_type,find_suspect_diagnose FROM `doc_relevant_feature`";
+            st = conn.createStatement();
+            rs = st.executeQuery(sql);
+            fw = new FileWriter(path + "bigdata_diagnose_feature_filter.dict");
+            while (rs.next()) {
+                r1 = String.valueOf(rs.getInt(1));
+                r2 = rs.getString(2);
+                r3 = rs.getString(3);
+                r4 = rs.getString(4);
+                r5 = rs.getString(5);
+                r6 = rs.getString(6);
+                r1 = StringUtils.isEmpty(r1) ? "" : r1;
+                r2 = StringUtils.isEmpty(r2) ? "" : r2;
+                r3 = StringUtils.isEmpty(r3) ? "" : r3;
+                r4 = StringUtils.isEmpty(r4) ? "" : r4;
+                r5 = StringUtils.isEmpty(r5) ? "" : r5;
+                r6 = StringUtils.isEmpty(r6) ? "" : r6;
+                fw.write(encrypDES.encrytor(r1 + "|" + r2 + "|" + r3 + "|" + r4 + "|" + r5
+                        + "|" + r6 ));
+                fw.write("\n");
+            }
+            fw.close();
+
         } catch (IOException ioe) {
             ioe.printStackTrace();
         } catch (SQLException sqle) {

+ 30 - 0
common-push/src/main/java/org/diagbot/common/push/naivebayes/NaiveBayesTest.java

@@ -0,0 +1,30 @@
+package org.diagbot.common.push.naivebayes;
+
+import org.diagbot.common.push.naivebayes.core.AlgorithmNaiveBayesExecutor;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * @Description:
+ * @Author: HUJING
+ * @Date: 2019/10/11 14:30
+ */
+public class NaiveBayesTest {
+    public static void main(String[] args) {
+        AlgorithmNaiveBayesExecutor a = new AlgorithmNaiveBayesExecutor();
+        Map<String, Map<String, String>> inputs = new HashMap<>();
+        inputs.put("咽部异物感",new HashMap<>());
+//        inputs.put("腹胀",new HashMap<>());
+//        inputs.put("乏力",new HashMap<>());
+        Map<String, Float> softmax = a.execute(inputs);
+        double i = 0.00;
+        for (Map.Entry<String, Float> s:softmax.entrySet()) {
+            i += s.getValue();
+            if (s.getValue() == 0){
+                System.out.println(s.getKey());
+            }
+        }
+        System.out.println(i);
+    }
+}

+ 92 - 0
common-push/src/main/java/org/diagbot/common/push/naivebayes/core/AlgorithmNaiveBayesExecutor.java

@@ -0,0 +1,92 @@
+package org.diagbot.common.push.naivebayes.core;
+
+import org.algorithm.core.AlgorithmExecutor;
+import org.diagbot.common.push.cache.ApplicationCacheUtil;
+
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * @Description:
+ * @Author: HUJING
+ * @Date: 2019/10/11 14:25
+ */
+public class AlgorithmNaiveBayesExecutor extends AlgorithmExecutor {
+    private double e = Math.E;
+    private static double unknownProbWithRelevant = -2; //已知有关,但未在病历中统计出来的特征
+    private static double unknownProbWithoutRelevant = -6;  //无关事件间的共现概率
+    private static double denominator = 0.00;
+
+    public Map<String, Float> execute(Map<String, Map<String, String>> inputs) {
+        return softmax(probCalc(inputs));
+    }
+
+    public Map<String, Float> probCalc(Map<String, Map<String, String>> inputs) {
+        Map<String, Map<String, Float>> doc_feature_naivebayes_prob_map = ApplicationCacheUtil.getDoc_feature_naivebayes_prob_map();
+        Map<String, Map<String, Float>> relevant_feature_map = ApplicationCacheUtil.getRelevant_feature_map();
+        Map<String, Float> naivebayesResult = new HashMap<>();
+        for (Map.Entry<String, Map<String, Float>> naivebayesProb : doc_feature_naivebayes_prob_map.entrySet()) {
+            float sum = 0.00f;
+            int i = 1;
+            for (String input : inputs.keySet()) {
+                //先验概率表里有该特征,就使用该特征的先验概率
+                if (naivebayesProb.getValue().containsKey(input)) {
+                    sum += Math.log10(naivebayesProb.getValue().get(input));
+                } else if (relevant_feature_map.get(naivebayesProb.getKey()) != null &&
+                        relevant_feature_map.get(naivebayesProb.getKey()).containsKey(input)) {
+                    //先验概率表里没有该特征 但 关联规则表里有该特征,则平滑处理(默认此时先验概率为10^-2)
+                    sum += unknownProbWithRelevant;
+                } else {
+                    sum += unknownProbWithoutRelevant;
+                }
+
+                if (i == inputs.size()) {
+                    sum += Math.log10(naivebayesProb.getValue().get("priorProb"));
+                    naivebayesResult.put(naivebayesProb.getKey(), sum);
+                }
+                i++;
+            }
+        }
+//        naivebayesResult = sortMap(naivebayesResult);
+        return naivebayesResult;
+    }
+
+    private Map<String, Float> softmax(Map<String, Float> naivebayesResultMap) {
+        Map<String, Float> softmaxResult = new HashMap<>();
+        calaDenominator(naivebayesResultMap);
+
+        for (Map.Entry<String, Float> naivebayesResult : naivebayesResultMap.entrySet()) {
+            softmaxResult.put(naivebayesResult.getKey(), (float) (Math.pow(this.e, naivebayesResult.getValue()) / denominator));
+        }
+
+        softmaxResult = sortMap(softmaxResult);
+        return softmaxResult;
+    }
+
+    private void calaDenominator(Map<String, Float> naivebayesResultMap) {
+        if (denominator == 0) {
+            for (Map.Entry<String, Float> naivebayesResult : naivebayesResultMap.entrySet()) {
+                //计算softmax算法分母
+                denominator += Math.pow(this.e, naivebayesResult.getValue());
+            }
+        }
+    }
+
+    public Map<String, Float> sortMap(Map<String, Float> ResultMap) {
+        ArrayList<Map.Entry<String, Float>> softmaxResultList = new ArrayList<>(ResultMap.entrySet());
+        softmaxResultList.sort(new Comparator<Map.Entry<String, Float>>() {
+            @Override
+            public int compare(Map.Entry<String, Float> o1, Map.Entry<String, Float> o2) {
+                return o2.getValue().compareTo(o1.getValue());
+            }
+        });
+        ResultMap = new LinkedHashMap<>();
+        for (Map.Entry<String, Float> softmaxResultMap : softmaxResultList) {
+            ResultMap.put(softmaxResultMap.getKey(), softmaxResultMap.getValue());
+        }
+        return ResultMap;
+    }
+}

+ 34 - 0
common-push/src/main/java/org/diagbot/common/push/naivebayes/factory/AlgorithmNaiveBayesFactory.java

@@ -0,0 +1,34 @@
+package org.diagbot.common.push.naivebayes.factory;
+
+import org.algorithm.core.AlgorithmExecutor;
+import org.algorithm.core.cnn.model.RelationExtractionEnsembleModel;
+import org.diagbot.common.push.naivebayes.core.AlgorithmNaiveBayesExecutor;
+
+/**
+ * @Description:
+ * @Author: HUJING
+ * @Date: 2019/9/10 15:25
+ */
+public class AlgorithmNaiveBayesFactory {
+    private static AlgorithmNaiveBayesExecutor algorithmNaiveBayesExecutorInstance = null;
+
+    public static AlgorithmExecutor getInstance() {
+        try {
+            algorithmNaiveBayesExecutorInstance = (AlgorithmNaiveBayesExecutor) create(algorithmNaiveBayesExecutorInstance, AlgorithmNaiveBayesExecutor.class);
+        } catch (InstantiationException inst) {
+            inst.printStackTrace();
+        } catch (IllegalAccessException ille) {
+            ille.printStackTrace();
+        }
+        return algorithmNaiveBayesExecutorInstance;
+    }
+
+    private static Object create(Object obj, Class cls) throws InstantiationException, IllegalAccessException {
+        if (obj == null) {
+            synchronized (cls) {
+                obj = cls.newInstance();
+            }
+        }
+        return obj;
+    }
+}

+ 26 - 19
common-push/src/main/java/org/diagbot/common/push/work/ParamsDataProxy.java

@@ -1,25 +1,22 @@
 package org.diagbot.common.push.work;
 
-import com.alibaba.fastjson.JSON;
-import org.algorithm.core.cnn.AlgorithmCNNExecutor;
 import org.algorithm.core.cnn.AlgorithmCNNExecutorPacs;
 import org.algorithm.factory.RelationExtractionFactory;
 import org.apache.commons.lang3.StringUtils;
-import org.diagbot.common.push.bean.FeatureRate;
-import org.diagbot.common.push.bean.ResponseData;
-import org.diagbot.common.push.bean.ResultMappingFilter;
 import org.diagbot.common.push.bean.SearchData;
-import org.diagbot.common.push.cache.ApplicationCacheUtil;
 import org.diagbot.common.push.util.PushConstants;
 import org.diagbot.nlp.feature.FeatureAnalyze;
 import org.diagbot.nlp.feature.FeatureType;
 import org.diagbot.nlp.util.Constants;
-import org.diagbot.nlp.util.NegativeEnum;
+import org.diagbot.pub.utils.PropertiesUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.servlet.http.HttpServletRequest;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 /**
  * @ClassName org.diagbot.bigdata.work.ParamsDataProxy
@@ -30,6 +27,7 @@ import java.util.*;
  **/
 public class ParamsDataProxy {
     Logger logger = LoggerFactory.getLogger(ParamsDataProxy.class);
+    private String featureNum = "";//特征提取范围
 
     public void createNormalInfo(SearchData searchData) throws Exception {
         //计算年龄区间
@@ -84,6 +82,10 @@ public class ParamsDataProxy {
         //获取入参中的特征信息
         FeatureAnalyze fa = new FeatureAnalyze();
         List<Map<String, Object>> featuresList = new ArrayList<>();
+        //获取配置文件中的特征范围
+        PropertiesUtil propertiesUtil = new PropertiesUtil("nlp.properties");
+        featureNum = propertiesUtil.getProperty("push.feature.num");
+        fa.setFeatureNum(featureNum);
         if (!StringUtils.isEmpty(searchData.getSymptom())) {
             //提取现病史
             featuresList = fa.start(searchData.getSymptom(), FeatureType.FEATURE);
@@ -126,16 +128,21 @@ public class ParamsDataProxy {
             featuresList = fa.start(searchData.getDiag(), FeatureType.DIAG);
             paramFeatureInit(searchData, featuresList);
         }
-        if (!StringUtils.isEmpty(searchData.getPacs())) {
-            //关系抽取模型
-            AlgorithmCNNExecutorPacs algorithmCNNExecutor = RelationExtractionFactory.getInstance();
-            RelationExtractionUtil re = new RelationExtractionUtil();
-            //Pacs原始分词结果
-            List<List<String>> execute = algorithmCNNExecutor.execute(searchData.getPacs(), re.createTriad(searchData));
-            if (execute != null && execute.size() > 0) {
-                re.addToSearchDataInputs(execute, searchData);
-            }
-        }
+//        if (!StringUtils.isEmpty(searchData.getPacs())) {
+//            //关系抽取模型
+//            AlgorithmCNNExecutorPacs algorithmCNNExecutor = RelationExtractionFactory.getInstance();
+//            RelationExtractionUtil re = new RelationExtractionUtil();
+//            //Pacs原始分词结果
+//            List<List<String>> execute = algorithmCNNExecutor.execute(searchData.getPacs(), re.createTriad(searchData));
+//            if (execute != null && execute.size() > 0) {
+//                re.addToSearchDataInputs(execute, searchData);
+//            }
+//        }
+        //模型需要病历文本信息传入
+        Map<String, String> map = new HashMap<>();
+        map.put("sentence", searchData.getSymptom());
+        searchData.getInputs().put("sentence", map);
+
     }
 
     /**

+ 23 - 7
graph-web/src/main/java/org/diagbot/graphWeb/work/GraphCalculate.java

@@ -19,6 +19,7 @@ import javax.servlet.http.HttpServletRequest;
 import java.util.*;
 
 import org.diagbot.nlp.rule.module.PreResult;
+import org.diagbot.nlp.util.Constants;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -39,8 +40,6 @@ public class GraphCalculate {
         long starttime = System.currentTimeMillis();
         System.out.println("Start at: " + starttime);
         ResponseData responseData = new ResponseData();
-//        ParamsDataProxy paramsDataProxy = new ParamsDataProxy();
-//        paramsDataProxy.createSearchData(request, searchData);
         List<String> inputList = new ArrayList<>();
         int age = searchData.getAge();
         String sex = searchData.getSex();
@@ -75,8 +74,10 @@ public class GraphCalculate {
         //计算诊断
         Map<String, Object> condition =null;
         Map<String, Map<String, String>> excludelist = null;
-        if(featureTypeList.contains("2")){
+        starttime = System.currentTimeMillis();
+        if(featureTypeList.contains(Constants.feature_type_diag)){
             condition = neo4jAPI.getNewCondition((String[]) inputList.toArray(new String[inputList.size()]),webDiag );
+            System.out.println("推送诊断消耗:"+(System.currentTimeMillis()-starttime)+"s");
             // 查找需要排除的诊断
             excludelist = neo4jAPI.getExcludeDiag(inputList);
             responseData.setExcludeDiag(Arrays.asList(excludelist.keySet().stream().toArray(String[]::new)));
@@ -129,32 +130,38 @@ public class GraphCalculate {
             }
         }
         logger.info("页面导入的所有化验项为 :" +lisSet);
+        starttime = System.currentTimeMillis();
         //走治疗
-        if (StringUtils.isNotEmpty(diseaseName) && featureTypeList.contains("6")) {
+        if (StringUtils.isNotEmpty(diseaseName) && featureTypeList.contains(Constants.feature_type_treat)) {
             // 查找页面诊断里是否有不良反应
             Map<String, List<String>> disUE = neo4jAPI.getDisUE(diseaseName, diseaseType);
             //根据页面输入内容推出的不良反应集合
             Set<String> ue = neo4jAPI.getUe((String[]) inputList.toArray(new String[inputList.size()]));
             //走平常诊断治疗
-            Map<String, Filnlly> mulDiseaseTreat = neo4jAPI.getMulDiseaseTreat_2(diseaseName,webDiag, diseaseType, diseaseSet,disUE,ue,String.join(",", inputList));
-            responseData.setTreat(mulDiseaseTreat);
+//            Map<String, Filnlly> mulDiseaseTreat = neo4jAPI.getMulDiseaseTreat_2(diseaseName,webDiag, diseaseType, diseaseSet,disUE,ue,String.join(",", inputList));
+            Map<String, Filnlly> mulDiseaseTreat_new = neo4jAPI.getMulDiseaseTreat_new(diseaseName,webDiag, diseaseType, diseaseSet,disUE,ue,String.join(",", inputList));
+            System.out.println("推送治疗消耗:"+(System.currentTimeMillis()-starttime)+"s");
+            responseData.setTreat(mulDiseaseTreat_new);
         }
         //管理评估(慢病才有)
         if (featureTypeList.contains("11") && diseaseType == 1 && diseaseType != null) {
             logger.info("featureTypeList 包含11,走管理评估!!!");
+            starttime = System.currentTimeMillis();
             if(webDiag != null){
                 MangementEvaluation mangementEvaluation = neo4jAPI.pushMe(webDiagList,lis_Result);
+                System.out.println("推送管理评估消耗:"+(System.currentTimeMillis()-starttime)+"s");
                 Map<String, JSONObject> mangementEvaluation1 = mangementEvaluation.getMangementEvaluation();
                 responseData.setManagementEvaluation(mangementEvaluation1);
             }
         }
         //指标推送
         if (featureTypeList.contains("22") ) {
+            starttime = System.currentTimeMillis();
             List<MedicalIndication> idns =new ArrayList<>();
             Set<String> newindSet = new HashSet<>();
             Set<String> newindSet1 = new HashSet<>();
             //查找指标
-            Set<String> indSet = neo4jAPI.getInd((String[]) inputList.toArray(new String[inputList.size()]),sex,age);
+            Set<String> indSet = neo4jAPI.getInd((String[]) inputList.toArray(new String[inputList.size()]),sex,age,webDiagList);
             if(indSet != null && indSet.size()>0){
                 for (String ind:indSet) {
                     if("肾功能不全".equals(ind)){
@@ -195,6 +202,7 @@ public class GraphCalculate {
                     idns.add(medicalIndication);
                 }
             }
+            System.out.println("推送指标消耗:"+(System.currentTimeMillis()-starttime)+"s");
             responseData.setMedicalIndications(idns);
         }
         //诊断推送
@@ -203,4 +211,12 @@ public class GraphCalculate {
         System.out.println("Total takes: " + (System.currentTimeMillis()-starttime)/1000d + 's');
         return responseData;
     }
+    /**
+     * 推送化验,辅检,体征
+     */
+    public Map<String, List<FeatureRate>> getLisPacs(HttpServletRequest request, SearchData searchData) {
+
+        Map<String, List<FeatureRate>> lisPacs = neo4jAPI.getLisPacs(searchData);
+        return lisPacs;
+    }
 }

تفاوت فایلی نمایش داده نمی شود زیرا این فایل بسیار بزرگ است
+ 352 - 875
graph/src/main/java/org/diagbot/graph/jdbc/Neo4jAPI.java


+ 0 - 1
graph/src/main/java/org/diagbot/graph/jdbc/gdbtest.java

@@ -65,7 +65,6 @@ public class gdbtest {
             JSONArray jarray = jobj.getJSONArray("lis");
 
             Neo4jAPI napi = new Neo4jAPI(DriverManager.newDrive("192.168.3.201:7687", "neo4j", "123456"));
-            lisres = napi.AnalysisLISResult(jarray);
 
             for (String item:lisres) {
                 System.out.println(item);

+ 23 - 18
graph/src/main/resources/bolt.properties

@@ -7,7 +7,7 @@ pass_235 = diagbot@20180822
 #\u7EBF\u4E0A\u4F7F\u7528
 bolt.uri=bolt://192.168.2.233
 bolt.user=neo4j
-bolt.passwd=123456
+bolt.passwd=root
 
 
 #\u6D4B\u8BD5\u4F7F\u7528
@@ -22,9 +22,6 @@ match (n)-[r:\u5C5E\u4E8E]->(m)-[r1:\u7EC4\u5408]->(k) \n \
 where n.name= row \n \
 return m.name as condition, count(distinct r)>=m.path as jundgement, labels(m)[0] as label,k.name as standName
 
-searchCollection1=match (n)-[r:\u5C5E\u4E8E]->(m)-[r1:\u7EC4\u5408]->(k) \n \
-where n.name in fildList \n \
-return m.name as condition, count(distinct r)>=m.path as jundgement, labels(m)[0] as label,k.name as standName
 #\u67E5\u627E\u8FD1\u4E49\u8BCD\u7CBE\u534E\u7248
 serchCollect=match (n)-[r:\u8BCA\u65AD\u4F9D\u636E|:\u8FD1\u4E49\u8BCD]->(e) where n.name in fildList return n.name as fild,collect(distinct type(r)) as typeCollect
 #\u67E5\u627E\u80FD\u63A8\u51FA\u8BCA\u65AD\u4F9D\u636E\u7684\u8BCD
@@ -36,14 +33,6 @@ match (n)-[r:\u8BCA\u65AD\u4F9D\u636E]->(m)\n \
 where n.name= row\n \
 return m.name as condition, count(distinct r)>=m.path as jundgement, labels(m)[0] as label
 
-searchCondition1=match (l)-[r:\u8BCA\u65AD\u4F9D\u636E]->(m)\n \
-where l.name in newList\n \
-with m,fildList as data\n \
-match (n)-[r:\u8BCA\u65AD\u4F9D\u636E]->(m)\n \
-where n.name in data\n \
-with m.name as condition, count(distinct r) as sd,m.path as jundgement\n \
-where sd>=jundgement\n \
-return condition
 #\u67E5\u627E\u786E\u8BCA,\u62DF\u8BCA\u7684\u8BED\u53E5
 searchQuezhen=match (n)-[r:\u786E\u8BCA|:\u62DF\u8BCA]->(m:Disease)\n \
 where n.name in fildList\n \
@@ -53,7 +42,8 @@ return m.name as name, labels(m)[0] as label,type(r) as relationType
 #\u63A8\u9001\u786E\u8BCA,\u65B0\u7ED3\u6784,\u5305\u542B\u5173\u8054\u8BCD
 #\u67E5\u627E\u8FD9\u4E2A\u8BCD\u5C5E\u4E8E\u54EA\u4E2A\u5E8F\u53F7\u96C6\u5408
 searchNumColl=match(h)-[r:\u5C5E\u4E8E|:\u8BCA\u65AD\u4F9D\u636E]->(c:Condition)\n \
-where h.name in startList\n \
+where (h:Symptom or h:Vital or h:LISResult or h:PACSResult or h:History or h:Other or h:Cause or h:Prognosis or h:PrevailHistory or h:Disease or h:Indicators or h:UntowardEffect) and \
+ h.name in startList\n \
 with c.name as v,count(distinct r)>=c.path as hh\n \
 where hh =true\n \
 return v
@@ -75,7 +65,7 @@ return m.name as name, labels(m)[0] as label,type(r) as relationType;
 excludeDiag=match (ex)-[q:\u6392\u9664\u4F9D\u636E]-(c)-[r:\u6392\u9664]->(d:Disease) where ex.name in infostr return d
 #\u6CBB\u7597\u5904\u7406
 #\u67E5\u627E\u8BCA\u65AD\u5BF9\u5E94\u7684\u7C7B\u548C\u836F,\u4EE5\u53CA\u4ED6\u4EEC\u7684\u6392\u5E8F
-searchDrugsMedic=match (d:Disease{name:diseaseName})-[r0:\u63A8\u8350]->(m:Drugs)-[:\u5305\u542B]->(n:Medicine),(d:Disease)-[r1:\u63A8\u8350]->(n)\n \
+searchDrugsMedic=match (d:Disease{name:diseaseName})-[r:\u6CBB\u7597\u65B9\u6848]->(t:Treat)-[r0:\u63A8\u8350]->(m:Drugs)-[:\u5305\u542B]->(n:Medicine),(t:Treat)-[r1:\u63A8\u8350]->(n)\n \
 where r0.p<6 \n \
 return m.name as \u7C7B, r0.p as sort, n.name as \u836F\u7269, r1.rate as a order by sort , a desc
 #\u67E5\u627E\u8BCA\u65AD\u5BF9\u5E94\u7684\u836F\u7269\u7684\u5927\u5C0F\u7C7B
@@ -100,11 +90,26 @@ where n.name in fildList\n \
 with distinct m,r\n \
 return m.name as name
 
-#\u67E5\u627E\u4E00\u4E9B\u8BCD\u662F\u5426\u5728\u56FE\u8C31\u4E2D
-searchWords=match(d) where d.name in fildList return distinct d.name as name
 #\u6839\u636E\u75BE\u75C5\u67E5\u627E\u76F8\u5E94\u7684Lis Pacs
-serchLisPacs=match (d:Disease)-[r1:\u63A8\u8350]->(m)\n \
-where d.name in diseaseNmae return distinct d.name as name,labels(m)[0] as lei,m.name as n
+serchLisPacs=unwind disList as row \n \
+  match (d:Disease)-[r1:\u63A8\u8350|:\u8868\u73B0]->(m) where d.name=row return  labels(m)[0] as label,m.name as name
+
+# \u65B0\u7ED3\u6784\u6CBB\u7597\u67E5\u8BE2
+searchTreat=match(h:Disease{name:disName})-[r:\u6CBB\u7597\u65B9\u6848]->(t:Treat)-[r2:\u63A8\u8350]->(m:Medicine)-[r1:\u5C5E\u4E8E]->(d:Drugs)\n \
+with t,m,d\n \
+match(t)-[r:\u63A8\u8350]->(m)-[r1:\u5C5E\u4E8E]->(h:Drugs)\n \
+where d.name=h.bigDrugs \n \
+with t,m,d,h\n \
+match(m)<-[R:\u614E\u7528|:\u5FCC\u7528]-(i) where i.name in filds\n \
+return t.name as treat,d.name as big,h.name as sub,m.name as med,type(R) as ty\n \
+order by h.p\n \
+union\n \
+match(h:Disease{name:disName})-[r:\u6CBB\u7597\u65B9\u6848]->(t:Treat)-[r2:\u63A8\u8350]->(m:Medicine)-[r1:\u5C5E\u4E8E]->(d:Drugs)\n \
+with t,m,d\n \
+match(t)-[r:\u63A8\u8350]->(m)-[r1:\u5C5E\u4E8E]->(h:Drugs)\n \
+where d.name=h.bigDrugs and h.p<6 \n \
+return t.name as treat,d.name as big,h.name as sub, m.name as med,null as ty\n \
+order by h.p
 
 #\u66F4\u65B0\u8BCA\u65AD\u4F9D\u636E\u7684path
 updateConditionPath=match (n:Condition)<-[:\u8BCA\u65AD\u4F9D\u636E]-(m) \n \

+ 5 - 0
nlp-web/pom.xml

@@ -40,6 +40,11 @@
 			<artifactId>algorithm</artifactId>
 			<version>1.0.0</version>
 		</dependency>
+		<dependency>
+			<groupId>org.diagbot</groupId>
+			<artifactId>common-push</artifactId>
+			<version>1.0.0</version>
+		</dependency>
 		<dependency>
 			<groupId>org.diagbot</groupId>
 			<artifactId>common-service</artifactId>

+ 26 - 5
nlp-web/src/main/java/org/diagbot/nlp/controller/FeatureController.java

@@ -2,6 +2,8 @@ package org.diagbot.nlp.controller;
 
 import com.github.pagehelper.PageInfo;
 import org.apache.commons.lang3.StringUtils;
+import org.diagbot.common.push.bean.RelevantFeature;
+import org.diagbot.common.push.cache.ApplicationCacheUtil;
 import org.diagbot.nlp.common.NlpWebConstants;
 import org.diagbot.nlp.dao.model.Feature;
 import org.diagbot.nlp.dao.model.Info;
@@ -19,6 +21,7 @@ import org.diagbot.nlp.util.NlpCache;
 import org.diagbot.nlp.util.NlpUtil;
 import org.diagbot.pub.api.Response;
 import org.diagbot.pub.jdbc.MysqlJdbc;
+import org.diagbot.pub.utils.PropertiesUtil;
 import org.diagbot.pub.web.BaseController;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.core.env.Environment;
@@ -45,6 +48,7 @@ public class FeatureController extends BaseController<Feature, FeatureWrapper, L
     private Map<String, String> propelVital= null;
     private Map<String, String> propelLis = null;
     private Map<String, String> propelPacs = null;
+    private Map<String, RelevantFeature> relevant_feature_map = null;
 
     {
         listView = "pages/doc/feature/list";
@@ -151,12 +155,19 @@ public class FeatureController extends BaseController<Feature, FeatureWrapper, L
             Map<String, String> propel = null;
             List<Map<String, Object>> data = new ArrayList<>();
 
-           /* PropertiesUtil propertiesUtil = new PropertiesUtil("nlp.properties");
-            featureNum = propertiesUtil.getProperty("push.feature.num");*/
+            PropertiesUtil propertiesUtil = new PropertiesUtil("nlp.properties");
+            featureNum = propertiesUtil.getProperty("extract.feature.num");
+            Boolean filter = false;//判断是否需要过滤
+            if(propertiesUtil.getProperty("extract.feature.filter").equals("1")){//配置为1时过滤
+                filter = true;
+            }
+            if (filter){
+                relevant_feature_map = ApplicationCacheUtil.get_relevant_feature();
+            }
             FeatureAnalyze sa = new FeatureAnalyze();
             for (String featureType : featureTypes.split(",")) {
                 for (String feature : featureNum.split(",")) {
-                    /*sa.setFeatureNum(feature);*/
+                    sa.setFeatureNum(feature);
                     for (Info info : docInfos) {
                         switch (FeatureType.parse(featureType)) {
                             case SYMPTOM:
@@ -168,7 +179,7 @@ public class FeatureController extends BaseController<Feature, FeatureWrapper, L
                                 propel = propelSymptom;
                                 break;
                             case FEATURE:
-                                content = info.getPresent();
+                                content = info.getChief() + info.getPresent() + info.getVital();
                                 propel = propelSymptom;
                                 break;
                             case VITAL:
@@ -203,7 +214,17 @@ public class FeatureController extends BaseController<Feature, FeatureWrapper, L
                             } else {
                                 featureMap.put("is_push", "1");
                             }
-                            data.add(featureMap);
+                            if(filter){//过滤过的结果
+                                if(relevant_feature_map != null){
+                                    if(relevant_feature_map.get(info.getDiag()) != null){//获取对应疾病特征信息
+                                        if(relevant_feature_map.get(info.getDiag()).getFeature().equals(featureMap.get("feature_name"))){//与特征信息作对比
+                                            data.add(featureMap);
+                                        }
+                                    }
+                                }
+                            }else {
+                                data.add(featureMap);
+                            }
                         }
 
                     }

+ 1 - 1
nlp-web/src/main/resources/application.yml

@@ -12,7 +12,7 @@ spring:
       charset: UTF-8
       enabled: true
   datasource:       # mybatis 配置,使用druid数据源
-    url: jdbc:mysql://1.1.1.1:3306/diagbot-app?useUnicode=true&characterEncoding=UTF-8
+    url: jdbc:mysql://192.168.2.235:3306/med-s?useUnicode=true&characterEncoding=UTF-8
     username: root
     password: diagbot@20180822
     type: com.alibaba.druid.pool.DruidDataSource

+ 4 - 4
nlp/src/main/java/org/diagbot/nlp/feature/FeatureAnalyze.java

@@ -18,7 +18,7 @@ import java.util.Map;
 
 public class FeatureAnalyze {
     private LexemePath<Lexeme> lexemePath = null;
-/*    private String featureNum = "";//特征提取范围*/
+    private String featureNum = "";//特征提取范围
 
     Logger logger = LoggerFactory.getLogger(FeatureAnalyze.class);
 
@@ -62,7 +62,7 @@ public class FeatureAnalyze {
 //        }
 //        logger.info("分词文本结果:" + lexeme_text);
         lexemePath = replaceLexeme(lexemePath);
-        /*caseToken.getFeatureSize(featureNum);*/
+        caseToken.getFeatureSize(featureNum);
         return caseToken.analyze(lexemePath);
     }
 
@@ -101,8 +101,8 @@ public class FeatureAnalyze {
         return lexemePath;
     }
 
-  /*  public String setFeatureNum(String featureNum){
+    public String setFeatureNum(String featureNum){
         this.featureNum = featureNum;
         return featureNum;
-    }*/
+    }
 }

+ 6 - 13
nlp/src/main/java/org/diagbot/nlp/feature/extract/CaseToken.java

@@ -1,5 +1,6 @@
 package org.diagbot.nlp.feature.extract;
 
+import org.apache.commons.lang3.StringUtils;
 import org.diagbot.nlp.participle.word.Lexeme;
 import org.diagbot.nlp.participle.word.LexemePath;
 import org.diagbot.nlp.util.Constants;
@@ -22,7 +23,7 @@ public abstract class CaseToken {
     protected Lexeme leftFeatureLexeme = null;
     protected Lexeme rightFeatureLexeme = null;
     protected int sn = 0;
-/*    protected String featureSize = "";*/
+    protected String featureSize = "";
 
     static {
         Arrays.sort(ignore_symbol);
@@ -103,7 +104,7 @@ public abstract class CaseToken {
             }
         }
         if (!hasFeature) {
- /*           if (StringUtils.isNotEmpty(featureSize)) {
+            if (StringUtils.isNotEmpty(featureSize)) {
                 if(featureSize.equals("all")){//featureSize为all时提取所有特征
                     Map<String, Object> fMap = new HashMap<>(10);
                     fMap.put("feature_name", lexeme.getText());
@@ -125,19 +126,11 @@ public abstract class CaseToken {
                         featuresList.add(fMap);
                     }
                 }
-            }*/
-            Map<String, Object> fMap = new HashMap<>(10);
-            fMap.put("feature_name", lexeme.getText());
-            fMap.put("feature_type", featureType);
-            fMap.put("negative", key);
-            fMap.put("sn", String.valueOf(sn++));
-            fMap.put("property", lexeme.getProperty());
-            fMap.put("concept", lexeme.getConcept());
-            featuresList.add(fMap);
+            }
         }
     }
-  /*  public void getFeatureSize(String fetureSize){
+    public void getFeatureSize(String fetureSize){
         this.featureSize = fetureSize;
-    }*/
+    }
 }
 

+ 3 - 1
nlp/src/main/java/org/diagbot/nlp/feature/extract/CaseTokenFeature.java

@@ -14,7 +14,9 @@ import java.util.Map;
 public class CaseTokenFeature extends CaseToken {
     private NegativeEnum[] nees_symptom = new NegativeEnum[]{NegativeEnum.SYMPTOM,
             NegativeEnum.BODY_PART, NegativeEnum.PROPERTY, NegativeEnum.DEEP, NegativeEnum.DISEASE,
-            NegativeEnum.CAUSE, NegativeEnum.VITAL_RESULT, NegativeEnum.DIAG_STAND};
+            NegativeEnum.CAUSE, NegativeEnum.VITAL_RESULT, NegativeEnum.VITAL_INDEX_VALUE, NegativeEnum.DIAG_STAND,
+            NegativeEnum.SYMPTOM_PERFORMANCE, NegativeEnum.MEDICINE,NegativeEnum.MEDICINE_NAME, NegativeEnum.MEDICINE_PRD,
+            NegativeEnum.OPERATION, NegativeEnum.TREATMENT, NegativeEnum.SYMPTOM_INDEX, NegativeEnum.LIS_RESULT, NegativeEnum.PACS_RESULT};
 
     {
         stop_symbol = NlpUtil.extendsSymbol(stop_symbol, new String[]{",", ",", ":", ":"});

+ 7 - 1
nlp/src/main/java/org/diagbot/nlp/util/NegativeEnum.java

@@ -12,7 +12,7 @@ public enum NegativeEnum {
     SYMPTOM_PERFORMANCE("26"), NUMBER_QUANTIFIER("27"), DIGITS("28"),
     OTHER("44"),
     VITAL_INDEX("33"), VITAL_INDEX_VALUE("34"), VITAL_RESULT("35"),
-    ADDRESS("36"), PERSON("38"), PERSON_FEATURE_DESC("39"), PUB_NAME("46"),
+    ADDRESS("36"), PERSON("38"), PERSON_FEATURE_DESC("39"), PUB_NAME("46"), MEDICINE_NAME("53"),MEDICINE_PRD("54"),
     RETURN_VISIT("68"), DIAG_STAND("70");
     private String value;
 
@@ -150,6 +150,12 @@ public enum NegativeEnum {
             case "46":
                 negativeEnum = NegativeEnum.PUB_NAME;
                 break;
+            case "53":
+                negativeEnum = NegativeEnum.MEDICINE_NAME;
+                break;
+            case "54":
+                negativeEnum = NegativeEnum.MEDICINE_PRD;
+                break;
             case "68":
                 negativeEnum = NegativeEnum.RETURN_VISIT;
                 break;

+ 6 - 2
nlp/src/main/resources/nlp.properties

@@ -2,5 +2,9 @@
 cache.file.dir=/opt/diagbot-push/cache_file/
 #cache.file.dir=e:\\cache_file\\
 
-#特征提取范围(不限制范围时配置:all)
-#push.feature.num=all
+#抽取时——特征提取范围(不限制范围时配置:all)
+extract.feature.num=all
+#推送时——特征提取范围(不限制范围时配置:all)
+push.feature.num=all
+#是否过滤(0.不过滤 1.过滤)
+extract.feature.filter=0

+ 11 - 25
push-web/src/main/java/org/diagbot/push/controller/AlgorithmController.java

@@ -80,7 +80,7 @@ public class AlgorithmController extends BaseController {
         Response<ResponseData> response = new Response<>();
 
         MysqlJdbc nlpJdbc = new MysqlJdbc("root", "diagbot@20180822", "jdbc:mysql://192.168.2.235:3306/med-s?useUnicode=true&characterEncoding=UTF-8");
-        List<Map<String, String>> data = nlpJdbc.query("doc_info_validate", new String[] { "pk_dcpv", "present", "diag", "sex", "age" }, "");
+        List<Map<String, String>> data = nlpJdbc.query("doc_info", new String[] { "pk_dcpv", "present", "diag", "sex", "age" }, "");
 
         Map<String, String> diags = NlpCache.getStandard_info_synonym_map().get(Constants.word_property_diagnose);
 
@@ -88,8 +88,8 @@ public class AlgorithmController extends BaseController {
         List<Map<String, Object>> wheres = new ArrayList<>();
         for (Map<String, String> map : data) {
             SearchData searchData = new SearchData();
-            searchData.setAge(Integer.parseInt(map.get("age")));
-            searchData.setSex(map.get("sex"));
+//            searchData.setAge(Integer.parseInt(map.get("age")));
+//            searchData.setSex(map.get("sex"));
             searchData.setSymptom(map.get("present"));
             searchData.setFeatureType("2");
             searchData.setSysCode("2");
@@ -109,7 +109,7 @@ public class AlgorithmController extends BaseController {
                 } else if ((fr.getFeatureName().equals(map.get("diag")) || fr.getFeatureName().equals(diags.get(map.get("diag")))) && i < 5) {
                     line.put("check_5", "1");
                 }
-                if (i > 0 && i < 5) {
+                if (i > 0 && i < 10) {
                     push_diag = push_diag + "," + fr.getFeatureName();
                 } else if (i == 0) {
                     push_diag = fr.getFeatureName();
@@ -122,7 +122,7 @@ public class AlgorithmController extends BaseController {
             wheres.add(where);
         }
 
-        nlpJdbc.update("doc_info_validate", updates, wheres);
+        nlpJdbc.update("doc_info", updates, wheres);
 
         return response;
     }
@@ -150,6 +150,7 @@ public class AlgorithmController extends BaseController {
         responseData.setCrisisDetails(crisisApplication.crisisContent(searchData));
 
         //大数据推送
+//        searchData.setLength(6);    //模型推送最多6个比较合理
         AlgorithmCore core = new AlgorithmCore();
         ResponseData bigDataResponseData = core.algorithm(request, searchData, responseData);
 
@@ -157,19 +158,6 @@ public class AlgorithmController extends BaseController {
         ResponseData graphResponseData = graphCalculate.calculate(request, searchData);
         if (graphResponseData.getDis().size() > 0) {
             List<FeatureRate> disFeatureRates = new ArrayList<>();
-            /*boolean isFind = false;
-            for (int i = 0; i < bigDataResponseData.getDis().size(); i++) {
-                FeatureRate bigdata_fr = bigDataResponseData.getDis().get(i);
-                isFind = false;
-                for (FeatureRate graph_fr : graphResponseData.getDis()) {
-                    if (graph_fr.getDesc().contains("拟诊")|| graph_fr.getDesc().contains("确诊") && bigdata_fr.getFeatureName().equals(graph_fr.getFeatureName())) {
-                        isFind = true;
-                    }
-                }
-                if (!isFind) {
-                    disFeatureRates.add(bigdata_fr);
-                }
-            }*/
             List<FeatureRate> bigdis = bigDataResponseData.getDis();
             List<FeatureRate> graphdis = graphResponseData.getDis();
             for (FeatureRate bg:graphdis) {
@@ -210,17 +198,15 @@ public class AlgorithmController extends BaseController {
 
         //推送管理评估
         bigDataResponseData.setManagementEvaluation(graphResponseData.getManagementEvaluation());
-        //知识图谱直接替换大数据中的检验检查数据
-        LisPacsCalculate lisPacsCalculate = new LisPacsCalculate();
-        graphResponseData = lisPacsCalculate.getLisPacs(request, searchData);
-
-        bigDataResponseData.setLabs(graphResponseData.getLabs());
-        bigDataResponseData.setPacs(graphResponseData.getPacs());
 
+        //知识图谱直接替换大数据中的检验检查数据
+        Map<String, List<FeatureRate>> lisPacs = graphCalculate.getLisPacs(request, searchData);
+        bigDataResponseData.setLabs(lisPacs.get("lisList"));
+        bigDataResponseData.setPacs(lisPacs.get("pacsList"));
         //体征结果和指标推送
         Map<String, String> vitalCache = CacheUtil.getVitalCache();
         List<String> featureList = Arrays.asList(searchData.getFeatureTypes());
-        List<FeatureRate> vitals = graphResponseData.getVitals();
+        List<FeatureRate> vitals = lisPacs.get("vitalResultList");
         if (featureList.contains(Constants.feature_type_vital_index) && this.getVital(vitalCache, vitals).size() > 0) {
             bigDataResponseData.setVitals(this.getVital(vitalCache, vitals));
 

+ 2 - 2
push-web/src/main/resources/static/pages/algorithm/list.html

@@ -400,13 +400,13 @@
             $('#diag_list').html("");
             $('#before_combine_diag_list').html("");
             startDiag('/algorithm/page_neural', '#symptom_list', '1', resourceType, '111', '1');
-            startDiag('/algorithm/page_neural', '#vital_list', '3,2,7', resourceType, '131', '3');
+            startDiag('/algorithm/page_neural', '#vital_list', '3,2,7,42', resourceType, '131', '3');
             startDiag('/algorithm/page_neural', '#lis_list', '4,2,7', resourceType, '141', '4');
             startDiag('/algorithm/page_neural', '#pacs_list', '5,2,7', resourceType, '151', '5');
 
         } else {
             startDiag('/algorithm/page_neural', '#symptom_list', '1', resourceType, '11', '1');
-            startDiag('/algorithm/page_neural', '#vital_list', '3,2,7', resourceType, '31', '3');
+            startDiag('/algorithm/page_neural', '#vital_list', '3,2,7,42', resourceType, '31', '3');
             startDiag('/algorithm/page_neural', '#lis_list', '4,2,7', resourceType, '41', '4');
             startDiag('/algorithm/page_neural', '#pacs_list', '5,2,7', resourceType, '51', '5');
 

+ 461 - 0
push-web/src/main/resources/static/pages/eyehospital/list.html

@@ -0,0 +1,461 @@
+<!DOCTYPE html>
+<html>
+<head>
+    <meta charset="utf-8">
+    <meta http-equiv="X-UA-Compatible" content="IE=edge">
+    <title>AdminLTE 2 | Invoice</title>
+    <!-- Tell the browser to be responsive to screen width -->
+    <meta content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no" name="viewport">
+    <!-- Bootstrap 3.3.6 -->
+    <link rel="stylesheet" href="../bootstrap/css/bootstrap.min.css">
+    <!-- Font Awesome -->
+    <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.5.0/css/font-awesome.min.css">
+    <!-- Ionicons -->
+    <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/ionicons/2.0.1/css/ionicons.min.css">
+    <!-- Theme style -->
+    <link rel="stylesheet" href="../dist/css/AdminLTE.min.css">
+    <!-- AdminLTE Skins. Choose a skin from the css/skins
+         folder instead of downloading all of them to reduce the load. -->
+    <link rel="stylesheet" href="../dist/css/skins/_all-skins.min.css">
+
+    <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
+    <!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
+    <!--[if lt IE 9]>
+    <script src="https://oss.maxcdn.com/html5shiv/3.7.3/html5shiv.min.js"></script>
+    <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
+    <![endif]-->
+    <style>
+        .interval {
+            padding: 1px 8px !important;
+        }
+
+        #my_file {
+            visibility: hidden; /* 隐藏 */
+        }
+    </style>
+</head>
+<body class="hold-transition skin-blue sidebar-mini">
+    <!-- Left side column. contains the logo and sidebar -->
+    <!-- Content Wrapper. Contains page content -->
+    <div class="content-wrapper">
+        <!-- Content Header (Page header) -->
+        <section class="content-header">
+            <h1>
+                眼科医院
+            </h1>
+            <!--<ol class="breadcrumb">
+                <li><a href="#"><i class="fa fa-dashboard"></i> Home</a></li>
+                <li><a href="#">Tables</a></li>
+                <li class="active">Data tables</li>
+            </ol>-->
+        </section>
+
+        <form role="form">
+            <div class="box-body">
+                <div class="form-group">
+                    <label for="symptom_id">主诉</label>&nbsp;
+                    <input type="text" id="chief_id" placeholder="" size="150">&nbsp;&nbsp;&nbsp;&nbsp;
+                </div>
+                <div class="form-group">
+                    <label for="symptom_id">现病史</label>&nbsp;
+                    <input type="text" id="symptom_id" placeholder="" size="150">&nbsp;&nbsp;&nbsp;&nbsp;
+                </div>
+                <div class="form-group">
+                    <label for="vital_id">专科检查</label>&nbsp;
+                    <input type="text" id="vital_id" placeholder="" size="150">&nbsp;&nbsp;&nbsp;&nbsp;
+                </div>
+                <div class="form-group">
+                    <label for="pacs_id">辅检</label>&nbsp;
+                    <input type="text" id="pacs_id" placeholder="" size="150">&nbsp;&nbsp;&nbsp;&nbsp;
+                </div>
+               <!-- <div class="form-group">
+                    <label for="symptom_id">性别</label>&nbsp;
+                    <select style="padding:2px;width: 150px;" id="sex">
+                        <option></option>
+                        <option value="M">男(M)</option>
+                        <option value="F">女(F)</option>
+                    </select>&nbsp;&nbsp;&nbsp;&nbsp;
+                    <label for="symptom_id">年龄</label>&nbsp;
+                    <input type="text" id="age" placeholder="">
+                </div>-->
+            </div>
+            <!-- /.box-body -->
+            <div class="box-footer">
+                <button type="button" class="btn btn-primary" onclick="clickes();">推送</button>
+            </div>
+        </form>
+
+        <!-- Main content -->
+        <section class="content">
+            <div class="row">
+                <div class="col-xs-12">
+                    <div class="box">
+                        <div class="box-header">
+                            <h3 class="box-title">推送结果</h3>&nbsp;&nbsp;
+                        </div>
+                        <div class="box-body" id="feature_inputs_div">
+                        </div>
+                    </div>
+                    <!-- /.box -->
+                </div>
+            </div>
+            <!-- /.row -->
+                <!-- /.col -->
+            <!-- /.row -->
+        </section>
+        <!-- /.content -->
+    </div>
+    <!-- /.content-wrapper -->
+
+    <!-- /.control-sidebar -->
+    <!-- Add the sidebar's background. This div must be placed
+         immediately after the control sidebar -->
+    <div class="control-sidebar-bg"></div>
+<!-- ./wrapper -->
+<div class="modal fade" id="modal-default">
+    <div class="modal-dialog">
+        <div class="modal-content">
+            <div class="modal-header">
+                <button type="button" class="close" data-dismiss="modal" aria-label="Close">
+                    <span aria-hidden="true">&times;</span></button>
+                <h4 class="modal-title">消息</h4>
+            </div>
+            <div class="row" id="modal-loading">
+                <!-- /.col -->
+                <div class="col-md-12">
+                    <div class="box box-danger box-solid">
+                        <div class="box-header">
+                            <h3 class="box-title">诊断归一</h3>
+                        </div>
+                        <div class="box-body">
+                            此过程可能需要较长时间,请耐心等待... ...
+                        </div>
+                        <!-- /.box-body -->
+                        <!-- Loading (remove the following to stop the loading)-->
+                        <div class="overlay">
+                            <i class="fa fa-refresh fa-spin"></i>
+                        </div>
+                        <!-- end loading -->
+                    </div>
+                    <!-- /.box -->
+                </div>
+                <!-- /.col -->
+            </div>
+            <!-- /.row -->
+            <div class="modal-body">
+                <p></p>
+            </div>
+            <div class="modal-footer">
+                <button type="button" class="btn btn-default pull-left" data-dismiss="modal">Close</button>
+            </div>
+        </div>
+        <!-- /.modal-content -->
+    </div>
+    <!-- /.modal-dialog -->
+</div>
+<!-- /.modal -->
+<!-- jQuery 2.2.3 -->
+<script src="../plugins/jQuery/jquery-2.2.3.min.js"></script>
+<!-- Bootstrap 3.3.6 -->
+<script src="../bootstrap/js/bootstrap.min.js"></script>
+<!-- DataTables -->
+<script src="../plugins/datatables/jquery.dataTables.min.js"></script>
+<script src="../plugins/datatables/dataTables.bootstrap.min.js"></script>
+<!-- SlimScroll -->
+<script src="../plugins/slimScroll/jquery.slimscroll.min.js"></script>
+<!-- FastClick -->
+<script src="../plugins/fastclick/fastclick.js"></script>
+<!-- AdminLTE App -->
+<script src="../dist/js/app.min.js"></script>
+<!-- AdminLTE for demo purposes -->
+<script src="../dist/js/demo.js"></script>
+
+<script src="../dist/js/push.js"></script>
+
+<script>
+    $(function () {
+    });
+    function clickes(){
+        alert("进来了");
+        $.ajax({
+            url:push_web_url+"/eyehospital/people",//访问的地址
+            type:"get",
+            dataType:'JSON',//后台返回的数据格式类型
+            success:function (data) {
+                alert("成功了");
+                $("#feature_inputs_div").append(data.name);
+            }
+        })
+    };
+    function bayesPage(resourceType) {
+        var diag = $("#diag_id").val();
+        var symptom = $("#symptom").val();
+        if (diag != '' && symptom == '') {
+            $('#diag_list').html("");
+            $('#before_combine_diag_list').html("");
+            startDiag('/algorithm/page_neural', '#symptom_list', '1', resourceType, '111', '1');
+            startDiag('/algorithm/page_neural', '#vital_list', '3,2,7', resourceType, '131', '3');
+            startDiag('/algorithm/page_neural', '#lis_list', '4,2,7', resourceType, '141', '4');
+            startDiag('/algorithm/page_neural', '#pacs_list', '5,2,7', resourceType, '151', '5');
+
+        } else {
+            startDiag('/algorithm/page_neural', '#symptom_list', '1', resourceType, '11', '1');
+            startDiag('/algorithm/page_neural', '#vital_list', '3,2,7', resourceType, '31', '3');
+            startDiag('/algorithm/page_neural', '#lis_list', '4,2,7', resourceType, '41', '4');
+            startDiag('/algorithm/page_neural', '#pacs_list', '5,2,7', resourceType, '51', '5');
+
+            startDiagMapping('/algorithm/page_neural', '#diag_list', '2', resourceType, '21', '2');
+            startDiagMapping('/algorithm/page_neural', '#before_combine_diag_list', '2', resourceType, '21', '6');
+        }
+    }
+
+    function startDiagMapping(url, obj, featureType, resourceType, algorithmClassify, tp) {
+        $(obj).DataTable({
+            "paging": false,
+            "bPaginate" : true,
+            "lengthChange": true,
+            "searching": false,
+            "ordering": false,
+            "info": false,
+            "autoWidth": false,
+            "serverSide": true,
+            "destroy": true,
+            "iDisplayLength": 25,
+            "columns": [
+                {"data": "featureName"},
+                {"data": "extraProperty"},
+                {"data": "rate"}
+            ],
+            "ajax": {
+                "url": push_web_url + url,
+                "data": function ( d ) {
+                    d.featureType = featureType;
+                    d.resourceType = resourceType;
+                    d.algorithmClassifyValue =  algorithmClassify;
+                    var symptom = $("#symptom_id").val();
+                    var vital = $("#vital_id").val();
+                    var past = $("#past_id").val();
+                    var other = $("#other_id").val();
+                    var lis = $("#lis_id").val();
+                    var pacs = $("#pacs_id").val();
+                    var lisOrder = $("#lis_order").val();
+                    var pacsOrder = $("#pacs_order").val();
+                    var diag = $("#diag_id").val();
+                    var length = $("#length").val();
+                    var sex = $("#sex").val();
+                    var age = $("#age").val();
+                    var age_start = $("#age_start").val();
+                    var age_end = $("#age_end").val();
+                    d.sysCode = "2";
+                    //添加额外的参数传给服务器
+                    if (symptom != null && symptom != undefined) {
+                        d.symptom = symptom;
+                    }
+                    if (vital != null && vital != undefined) {
+                        d.vital = vital;
+                    }
+                    if (past != null && past != undefined) {
+                        d.past = past;
+                    }
+                    if (other != null && other != undefined) {
+                        d.other = other;
+                    }
+                    if (lis != null && lis != undefined) {
+                        d.lis = lis;
+                    }
+                    if (pacs != null && pacs != undefined) {
+                        d.pacs = pacs;
+                    }
+                    if (lisOrder != null && lisOrder != undefined) {
+                        d.lisOrder = lisOrder;
+                    }
+                    if (pacsOrder != null && pacsOrder != undefined) {
+                        d.pacsOrder = pacsOrder;
+                    }
+                    if (diag != null && diag != undefined && diag != '') {
+                        d.diag = diag;
+                    }
+                    if (length != null && length != undefined) {
+                        d.length = length;
+                    }
+                    if (sex != null && sex != undefined) {
+                        d.sex = sex;
+                    }
+                    if (age != '' && age_start != age && age != undefined) {
+                        d.age = age;
+                    }
+                    if (age_start != '' && age_start != null && age_start != undefined) {
+                        d.age_start = age_start;
+                    }
+                    if (age_end != '' && age_end != null && age_end != undefined) {
+                        d.age_end = age_end;
+                    }
+                },
+                "dataSrc": function (json) {
+                    var inputs = json.data.inputs;
+                    var h = "";
+                    $.each(inputs, function (key, item) {
+                        h += "<div class='form-group'><label>" + key + "&nbsp;</label>";
+                        h += "</div>";
+                    });
+                    $("#feature_inputs_div").html(h);
+
+                    if (tp == '1') {
+                        $("#participle_symptom").html(json.data.participleSymptom);
+                        json.data = json.data.symptom;
+                    }
+                    if (tp == '2') {
+                        $("#participle_diag").html(json.data.participleSymptom);
+                        json.data = json.data.dis;
+                    }
+                    if (tp == '3') {
+                        $("#participle_vital").html(json.data.participleSymptom);
+                        json.data = json.data.vitals;
+                    }
+                    if (tp == '4') {
+                        $("#participle_lis").html(json.data.participleSymptom);
+                        json.data = json.data.labs;
+                    }
+                    if (tp == '5') {
+                        $("#participle_pacs").html(json.data.participleSymptom);
+                        json.data = json.data.pacs;
+                    }
+                    if (tp == '6') {
+                        $("#before_combine_participle_diag").html(json.data.participleSymptom);
+                        json.data = json.data.beforeCombineDis;
+                    }
+                    return json.data;
+                }
+            }
+        });
+    }
+
+
+    function startDiag(url, obj, featureType, resourceType, algorithmClassify, tp) {
+        $(obj).DataTable({
+            "paging": false,
+            "bPaginate" : true,
+            "lengthChange": true,
+            "searching": false,
+            "ordering": false,
+            "info": false,
+            "autoWidth": false,
+            "serverSide": true,
+            "destroy": true,
+            "iDisplayLength": 25,
+            "columns": [
+                {"data": "featureName"},
+                {"data": "rate"}
+            ],
+            "ajax": {
+                "url": push_web_url + url,
+                "data": function ( d ) {
+                    d.featureType = featureType;
+                    d.resourceType = resourceType;
+                    d.algorithmClassifyValue =  algorithmClassify;
+                    var symptom = $("#symptom_id").val();
+                    var vital = $("#vital_id").val();
+                    var past = $("#past_id").val();
+                    var other = $("#other_id").val();
+                    var lis = $("#lis_id").val();
+                    var pacs = $("#pacs_id").val();
+                    var lisOrder = $("#lis_order").val();
+                    var pacsOrder = $("#pacs_order").val();
+                    var diag = $("#diag_id").val();
+                    var length = $("#length").val();
+                    var sex = $("#sex").val();
+                    var age = $("#age").val();
+                    var age_start = $("#age_start").val();
+                    var age_end = $("#age_end").val();
+                    d.sysCode = "2";
+                    //添加额外的参数传给服务器
+                    if (symptom != null && symptom != undefined) {
+                        d.symptom = symptom;
+                    }
+                    if (vital != null && vital != undefined) {
+                        d.vital = vital;
+                    }
+                    if (past != null && past != undefined) {
+                        d.past = past;
+                    }
+                    if (other != null && other != undefined) {
+                        d.other = other;
+                    }
+                    if (lis != null && lis != undefined) {
+                        d.lis = lis;
+                    }
+                    if (pacs != null && pacs != undefined) {
+                        d.pacs = pacs;
+                    }
+                    if (lisOrder != null && lisOrder != undefined) {
+                        d.lisOrder = lisOrder;
+                    }
+                    if (pacsOrder != null && pacsOrder != undefined) {
+                        d.pacsOrder = pacsOrder;
+                    }
+                    if (diag != null && diag != undefined && diag != '') {
+                        d.diag = diag;
+                    }
+                    if (length != null && length != undefined) {
+                        d.length = length;
+                    }
+                    if (sex != null && sex != undefined) {
+                        d.sex = sex;
+                    }
+                    if (age != '' && age_start != age && age != undefined) {
+                        d.age = age;
+                    }
+                    if (age_start != '' && age_start != null && age_start != undefined) {
+                        d.age_start = age_start;
+                    }
+                    if (age_end != '' && age_end != null && age_end != undefined) {
+                        d.age_end = age_end;
+                    }
+                },
+                "dataSrc": function (json) {
+                    var inputs = json.data.inputs;
+                    var h = "";
+                    $.each(inputs, function (key, item) {
+                        h += "<div class='form-group'><label>" + key + ":&nbsp;</label>";
+                        $.each(item,function (k, t) {
+                            if  (t == null) {
+                                t = "";
+                            }
+                            h += "&nbsp;(<label>" + k + ":" + t + "</label>)&nbsp;";
+                        });
+                        h += "</div>";
+                    });
+                    $("#feature_inputs_div").html(h);
+
+                    if (tp == '1') {
+                        $("#participle_symptom").html(json.data.participleSymptom);
+                        json.data = json.data.symptom;
+                    }
+                    if (tp == '2') {
+                        $("#participle_diag").html(json.data.participleSymptom);
+                        json.data = json.data.dis;
+                    }
+                    if (tp == '3') {
+                        $("#participle_vital").html(json.data.participleSymptom);
+                        json.data = json.data.vitals;
+                    }
+                    if (tp == '4') {
+                        $("#participle_lis").html(json.data.participleSymptom);
+                        json.data = json.data.labs;
+                    }
+                    if (tp == '5') {
+                        $("#participle_pacs").html(json.data.participleSymptom);
+                        json.data = json.data.pacs;
+                    }
+                    if (tp == '6') {
+                        $("#before_combine_participle_diag").html(json.data.participleSymptom);
+                        json.data = json.data.beforeCombineDis;
+                    }
+                    return json.data;
+                }
+            }
+        });
+    }
+</script>
+</body>
+</html>