فهرست منبع

1、删除很多现在不需要的逻辑代码

louhr 5 سال پیش
والد
کامیت
b0c0027c47
29 فایل تغییر یافته به همراه 363 افزوده شده و 1715 حذف شده
  1. 0 281
      bigdata-web/src/main/java/org/diagbot/bigdata/common/ApplicationCacheUtil.java
  2. 0 98
      bigdata-web/src/main/java/org/diagbot/bigdata/common/InitListener.java
  3. 2 2
      bigdata-web/src/main/java/org/diagbot/bigdata/controller/AlgorithmController.java
  4. 0 22
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingDiagMapper.java
  5. 0 22
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingFilterMapper.java
  6. 0 22
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingVitalMapper.java
  7. 0 38
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingDiag.java
  8. 0 68
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingFilter.java
  9. 0 38
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingVital.java
  10. 0 12
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingDiagWrapper.java
  11. 0 6
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingFilterWrapper.java
  12. 0 7
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingVitalWrapper.java
  13. 0 55
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingDiagMapper.xml
  14. 0 67
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingFilterMapper.xml
  15. 0 77
      bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingVitalMapper.xml
  16. 0 8
      bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingDiagService.java
  17. 0 8
      bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingFilterService.java
  18. 0 8
      bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingVitalService.java
  19. 0 21
      bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingDiagServiceImpl.java
  20. 0 21
      bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingFilterServiceImpl.java
  21. 0 21
      bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingVitalServiceImpl.java
  22. 5 7
      bigdata-web/src/main/java/org/diagbot/bigdata/work/AlgorithmCore.java
  23. 94 0
      bigdata-web/src/main/java/org/diagbot/bigdata/work/BigDataParamsProxy.java
  24. 1 1
      bigdata-web/src/main/java/org/diagbot/bigdata/work/BigDataSearchData.java
  25. 0 702
      bigdata-web/src/main/java/org/diagbot/bigdata/work/ParamsDataProxy.java
  26. 1 95
      bigdata-web/src/main/java/org/diagbot/bigdata/work/ResultDataProxy.java
  27. 2 6
      bigdata-web/src/test/java/org/diagbot/AddStandWordTest.java
  28. 2 2
      bigdata-web/src/main/java/org/diagbot/bigdata/util/BigDataConstants.java
  29. 256 0
      common-push/src/main/java/org/diagbot/common/push/work/ParamsDataProxy.java

+ 0 - 281
bigdata-web/src/main/java/org/diagbot/bigdata/common/ApplicationCacheUtil.java

@@ -1,281 +0,0 @@
-package org.diagbot.bigdata.common;
-
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-import org.diagbot.bigdata.util.BigDataConstants;
-import org.diagbot.common.javabean.Rule;
-import org.diagbot.nlp.participle.ParticipleUtil;
-import org.diagbot.nlp.participle.cfg.Configuration;
-import org.diagbot.nlp.participle.cfg.DefaultConfig;
-import org.diagbot.nlp.participle.word.Lexeme;
-import org.diagbot.nlp.participle.word.LexemePath;
-import org.diagbot.nlp.util.NegativeEnum;
-import org.diagbot.nlp.util.NlpCache;
-import org.diagbot.pub.utils.security.EncrypDES;
-
-import javax.servlet.ServletContext;
-import java.util.*;
-
-public class ApplicationCacheUtil {
-
-    //词库同义词定义
-    public static Map<String, Map<String, String>> standard_info_synonym_map = null;
-    //词库大小类定义
-    public static Map<String, String> standard_info_classify_map = null;
-    //树形结构存储大小类
-    public static Map<String, NlpCache.Node> standard_info_type_tree_map = null;
-//    体征衍射
-//    public static Map<String, String> doc_result_mapping_vital_map = null;
-    //诊断科室衍射
-    public static Map<String, String> doc_result_mapping_diag_map = null;
-    //特征性别 年龄过滤等
-    public static Map<String, Map<String, ResultMappingFilter>> doc_result_mapping_filter_map = null;
-    //诊断依据标准词
-    public static Map<String, List<Map<String, String>>> kl_result_mapping_standword_map = null;
-    // 规则
-    public static Map<String, List<Rule>> rule_filter_map = null;
-    public static Map<String, List<Rule>> kl_rule_filter_map = null;
-
-
-    public static Map<String, Map<String, String>> getStandard_info_synonym_map() {
-        if (standard_info_synonym_map == null) {
-            standard_info_synonym_map = NlpCache.getStandard_info_synonym_map();
-        }
-        return standard_info_synonym_map;
-    }
-
-    public static Map<String, String> getStandard_info_classify_map() {
-        if (standard_info_classify_map == null) {
-            standard_info_classify_map = NlpCache.getStandard_info_classify_map();
-        }
-        return standard_info_classify_map;
-    }
-
-    public static Map<String, NlpCache.Node> getStandard_info_type_tree_map() {
-        if (standard_info_type_tree_map == null) {
-            standard_info_type_tree_map = NlpCache.getStandard_info_type_tree_map();
-        }
-        return standard_info_type_tree_map;
-    }
-
-//    /**
-//     * 現已無用
-//     * @return
-//     */
-//    public static Map<String, String> getDoc_result_mapping_vital_map() {
-//        if (doc_result_mapping_vital_map == null) {
-//            Configuration configuration = new DefaultConfig();
-//            doc_result_mapping_vital_map = configuration.loadMapDict("doc_result_mapping_vital.dict");
-//        }
-//        return doc_result_mapping_vital_map;
-//    }
-
-    public static Map<String, String> getDoc_result_mapping_diag_map() {
-        if (doc_result_mapping_diag_map == null) {
-            createDoc_result_mapping_diag_map();
-        }
-        return doc_result_mapping_diag_map;
-    }
-
-    public static Map<String, String> createDoc_result_mapping_diag_map() {
-        Configuration configuration = new DefaultConfig();
-        doc_result_mapping_diag_map = configuration.loadMapDict("bigdata_diag_2_dept.dict");
-        return doc_result_mapping_diag_map;
-    }
-
-    public static Map<String, Map<String, ResultMappingFilter>> getDoc_result_mapping_filter_map() {
-        if (doc_result_mapping_filter_map == null) {
-            createDoc_result_mapping_filter_map();
-        }
-        return doc_result_mapping_filter_map;
-    }
-
-    public static Map<String, Map<String, ResultMappingFilter>> createDoc_result_mapping_filter_map() {
-        Configuration configuration = new DefaultConfig();
-        List<String> fileContents = configuration.readFileContents("bigdata_sex_age_filter.dict");
-        String[] line_string;
-        List<ResultMappingFilter> resultMappingFilters = new ArrayList<>();
-        try {
-            for (int i = 0; i < fileContents.size(); i++) {
-                line_string = org.apache.commons.lang3.StringUtils.split(fileContents.get(i), "\\|");
-                if (line_string.length == 5) {
-                    ResultMappingFilter resultMappingFilter = new ResultMappingFilter();
-                    resultMappingFilter.setFeatureName(line_string[0]);
-                    resultMappingFilter.setFeatureType(line_string[1]);
-                    resultMappingFilter.setSex(line_string[2]);
-                    resultMappingFilter.setAgeStart(Integer.parseInt(line_string[3]));
-                    resultMappingFilter.setAgeEnd(Integer.parseInt(line_string[4]));
-                    resultMappingFilters.add(resultMappingFilter);
-                }
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        doc_result_mapping_filter_map = new HashMap<>();
-        Map<String, ResultMappingFilter> filterMap = null;
-        for (ResultMappingFilter resultMappingFilter : resultMappingFilters) {
-            filterMap = doc_result_mapping_filter_map.get(resultMappingFilter.getFeatureType());
-            if (filterMap == null) {
-                filterMap = new HashMap<>();
-            }
-            filterMap.put(resultMappingFilter.getFeatureName(), resultMappingFilter);
-            doc_result_mapping_filter_map.put(resultMappingFilter.getFeatureType(), filterMap);
-        }
-        return doc_result_mapping_filter_map;
-    }
-
-    public static Map<String, List<Map<String, String>>> getKl_result_mapping_standword_map() {
-        if (kl_result_mapping_standword_map == null) {
-            createKl_result_mapping_standword_map();
-        }
-        return kl_result_mapping_standword_map;
-    }
-
-    public static Map<String, List<Map<String, String>>> createKl_result_mapping_standword_map() {
-        kl_result_mapping_standword_map = new HashMap<>();
-        Configuration configuration = new DefaultConfig();
-        List<String> fileContents = configuration.readFileContents("bigdata_value_analyze.dict");
-        List<Map<String, String>> standWordObjValList = null;
-        Map<String, String> standWordObjVal = null;
-        String operation = ">=|≥|>|大于|>|超过|<=|≤|<|小于|<|少于";
-        try {
-            for (String fileContent : fileContents) {
-                LexemePath<Lexeme> lexemes = null;
-                String op = "";
-                String[] fileContentSplit = null;
-                //每一个标准词根据大于小于符号切开,不然进行分词时还是会得到原本的标准词
-                if (fileContent.contains(">") || fileContent.contains("大于")
-                        || fileContent.contains(">") || fileContent.contains("超过")) {
-                    op = ">";
-                    fileContentSplit = fileContent.split(operation);
-                } else if (fileContent.contains("<") || fileContent.contains("小于")
-                        || fileContent.contains("<") || fileContent.contains("少于")) {
-                    op = "<";
-                    fileContentSplit = fileContent.split(operation);
-                } else if (fileContent.contains(">=") || fileContent.contains("≥")){
-                    op = ">=";
-                    fileContentSplit = fileContent.split(operation);
-                } else if (fileContent.contains("<=") || fileContent.contains("≤")) {
-                    op = "<=";
-                    fileContentSplit = fileContent.split(operation);
-                } else {
-                    continue;
-                }
-                LexemePath<Lexeme> lexemeWord = null;
-                //每一个标准词切开后进行分词
-                for (String fileContentWords : fileContentSplit) {
-                    lexemeWord = ParticipleUtil.participle(fileContentWords);
-                    if (lexemeWord != null) {
-                        if (null == lexemes) {
-                            lexemes = lexemeWord;
-                        } else {
-                            for (Lexeme lexeme : lexemeWord) {
-                                lexemes.add(lexeme);
-                            }
-                        }
-                    }
-                }
-                String standWordObjKey = "";
-                standWordObjValList = new ArrayList<>();
-                standWordObjVal = new HashMap<>();
-                int i = 0;
-                for (Lexeme lexeme : lexemes) {
-                    i++;
-                    if (lexeme.getProperty().contains(",")) {
-                        setProterty(lexeme); //如果分词后词性有多个,只选一个(暂时只处理症状,体征)
-                    }
-                    NegativeEnum lexemeNegativeEnum = NegativeEnum.parseOfValue(lexeme.getProperty());
-                    if (lexemeNegativeEnum == NegativeEnum.SYMPTOM || lexemeNegativeEnum == NegativeEnum.CAUSE
-                            || lexemeNegativeEnum == NegativeEnum.VITAL_INDEX
-                            || lexemeNegativeEnum == NegativeEnum.DIAG_STAND) {
-                        if (!kl_result_mapping_standword_map.containsKey(lexeme.getText())) {
-                            kl_result_mapping_standword_map.put(lexeme.getText(), standWordObjValList);
-                        } else {
-                            standWordObjKey = lexeme.getText();
-                        }
-                    }
-                    if (lexemeNegativeEnum == NegativeEnum.DIGITS) {
-                        standWordObjVal.put("value", lexeme.getText());
-                    }
-                    if (lexemeNegativeEnum == NegativeEnum.UNIT
-                            || lexemeNegativeEnum == NegativeEnum.EVENT_TIME
-                            || lexemeNegativeEnum == NegativeEnum.OTHER) {
-                        standWordObjVal.put("unit", lexeme.getText().toLowerCase());
-                    }
-                    if (lexemes.size() == i) {
-                        standWordObjVal.put("op", op);
-                        standWordObjVal.put("standword", fileContent);
-                        if (kl_result_mapping_standword_map.containsKey(standWordObjKey)) {
-                            kl_result_mapping_standword_map.get(standWordObjKey).add(standWordObjVal);
-                        } else {
-                            standWordObjValList.add(standWordObjVal);
-                        }
-                    }
-                }
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-        return kl_result_mapping_standword_map;
-    }
-
-    public static Map<String, List<Rule>> getKl_rule_filter_map() {
-        if (kl_rule_filter_map == null) {
-            create_kl_rule_filter_map();
-        }
-        return kl_rule_filter_map;
-    }
-
-    public static void create_kl_rule_filter_map() {
-        kl_rule_filter_map = new HashMap<>();
-
-        Configuration configuration = new DefaultConfig();
-        List<String> fileContents = configuration.readFileContents("bigdata_rule_filter.dict");
-
-        List<Rule> rules = null;
-        for (String line:fileContents) {
-            String[] content = line.split("\\|");
-            Rule rule = new Rule();
-            if (content.length == 14) {
-                rule.setIdx_name(content[0]==null?"":content[0]);
-                rule.setSet_name(content[1]==null?"":content[1]);
-                rule.setSet_status(content[2]==null?"":content[2]);
-                rule.setMin_value(content[3]==null?"":content[3]);
-                rule.setMin_concept_text(content[4]==null?"":content[4]);
-                rule.setMax_value(content[5]==null?"":content[5]);
-                rule.setMax_concept_text(content[6]==null?"":content[6]);
-                rule.setSet_value(content[7]==null?"":content[7]);
-                rule.setSet_concept_text(content[8]==null?"":content[8]);
-                rule.setUnit(content[9]==null?"":content[9]);
-                rule.setType_value(content[10]==null?"":content[10]);
-                rule.setMinRemind(content[11]==null?"":content[11]);
-                rule.setMaxRemind(content[12]==null?"":content[12]);
-                rule.setSetRemind(content[13]==null?"":content[13]);
-                if (kl_rule_filter_map.get(rule.getIdx_name()) == null) {
-                    rules = new ArrayList<>();
-                } else {
-                    rules = kl_rule_filter_map.get(rule.getIdx_name());
-                }
-                rules.add(rule);
-                kl_rule_filter_map.put(rule.getIdx_name(), rules);
-            }
-        }
-    }
-
-    public static void setProterty(Lexeme lexeme) {
-        for (String featureType : lexeme.getProperty().split(",")) {
-            switch (featureType) {
-                case "1":
-                    lexeme.setProperty("1");
-                    break;
-                case "33":
-                    lexeme.setProperty("33");
-                    break;
-                case "70":
-                    lexeme.setProperty("70");
-                    break;
-
-            }
-        }
-    }
-}

+ 0 - 98
bigdata-web/src/main/java/org/diagbot/bigdata/common/InitListener.java

@@ -1,98 +0,0 @@
-package org.diagbot.bigdata.common;
-
-
-import org.diagbot.bigdata.dao.model.ResultMappingDiag;
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-import org.diagbot.bigdata.dao.model.ResultMappingVital;
-import org.diagbot.bigdata.service.ResultMappingDiagService;
-import org.diagbot.bigdata.service.ResultMappingFilterService;
-import org.diagbot.bigdata.service.ResultMappingVitalService;
-import org.diagbot.bigdata.util.BigDataConstants;
-import org.diagbot.nlp.participle.cfg.Configuration;
-import org.diagbot.nlp.participle.cfg.DefaultConfig;
-import org.springframework.beans.factory.annotation.Autowired;
-
-import javax.servlet.ServletContextEvent;
-import javax.servlet.ServletContextListener;
-import javax.servlet.annotation.WebListener;
-import java.util.*;
-
-@WebListener
-public class InitListener implements ServletContextListener {
-    @Autowired
-    ResultMappingVitalService resultMappingVitalService;
-    @Autowired
-    ResultMappingDiagService resultMappingDiagService;
-    @Autowired
-    ResultMappingFilterService resultMappingFilterService;
-
-    public void contextDestroyed(ServletContextEvent arg0) {
-
-    }
-
-    /**
-     * 开始初始化数据
-     *
-     * @return
-     */
-    public void contextInitialized(ServletContextEvent event) {
-//        contextStandardLibraryInitialized(event);
-//        contextFeatureMappingInitialized(event);
-//        contextResultMappingDiagInitialized(event);
-//        contextResultMappingFilterInitialized(event);
-    }
-
-//    public void contextStandardLibraryInitialized(ServletContextEvent event) {
-//        ApplicationCacheUtil applicationCacheUtil = new ApplicationCacheUtil();
-//        applicationCacheUtil.putStandardInfoContext(event.getServletContext());
-//    }
-
-//    public void contextFeatureMappingInitialized(ServletContextEvent event) {
-//        Configuration configuration = new DefaultConfig();
-//        Map<String, String> resultMappingVitals = configuration.loadMapDict("tc.dict");
-////        List<ResultMappingVital> resultMappingVitals = resultMappingVitalService.selectList(new HashMap<>());
-//        Map<String, String> mapping = new HashMap<>();
-//        for (ResultMappingVital resultMappingVital : resultMappingVitals) {
-//            mapping.put(resultMappingVital.getName(), resultMappingVital.getNameMapping());
-//        }
-//        event.getServletContext().setAttribute(BigDataConstants.result_mapping_vital, mapping);
-//    }
-//
-//    public void contextResultMappingDiagInitialized(ServletContextEvent event) {
-//        List<ResultMappingDiag> resultMappingDiags = resultMappingDiagService.selectList(new HashMap<>());
-//
-//        Map<String, String> mapping = new HashMap<>();
-//        for (ResultMappingDiag resultMappingDiag : resultMappingDiags) {
-//            mapping.put(resultMappingDiag.getDiagName(), resultMappingDiag.getDeptName());
-//        }
-//        event.getServletContext().setAttribute(BigDataConstants.result_mapping_diag, mapping);
-//    }
-//
-//    public void contextResultMappingFilterInitialized(ServletContextEvent event) {
-//        List<ResultMappingFilter> resultMappingFilters = resultMappingFilterService.selectList(new HashMap<>());
-//
-//        Map<String, Map<String, ResultMappingFilter>> mapping = new HashMap<>();
-//        Map<String, ResultMappingFilter> filterMap = null;
-//        for (ResultMappingFilter resultMappingFilter : resultMappingFilters) {
-//            filterMap = mapping.get(resultMappingFilter.getFeatureType());
-//            if (filterMap == null) {
-//                filterMap = new HashMap<>();
-//            }
-//            filterMap.put(resultMappingFilter.getFeatureName(), resultMappingFilter);
-//            mapping.put(resultMappingFilter.getFeatureType(), filterMap);
-//        }
-//        event.getServletContext().setAttribute(BigDataConstants.result_mapping_filter, mapping);
-//    }
-
-    private void put(Map<String, List<String>> map, String key, List<String> value, String ele) {
-        if (value == null) {
-            value = new ArrayList<>(Arrays.asList(ele));
-            map.put(key, value);
-        } else {
-            if (!value.contains(ele)) {
-                value.add(ele);
-                map.put(key, value);
-            }
-        }
-    }
-}

+ 2 - 2
bigdata-web/src/main/java/org/diagbot/bigdata/controller/AlgorithmController.java

@@ -1,8 +1,8 @@
 package org.diagbot.bigdata.controller;
 
 import org.diagbot.bigdata.work.AlgorithmCore;
-import org.diagbot.common.work.ResponseData;
-import org.diagbot.common.work.SearchData;
+import org.diagbot.common.push.bean.ResponseData;
+import org.diagbot.common.push.bean.SearchData;
 import org.diagbot.nlp.util.Constants;
 import org.diagbot.pub.api.Response;
 import org.diagbot.pub.web.BaseController;

+ 0 - 22
bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingDiagMapper.java

@@ -1,22 +0,0 @@
-/** 
-* @Company: 杭州朗通信息技术有限公司
-* @Department: 医疗事业部
-* @Description: 互动反馈系统 
-* @Address: 浙江省杭州市余杭区向往街1008号乐富海邦园11幢4楼
-*/
-package org.diagbot.bigdata.dao.mapper;
-
-import org.diagbot.bigdata.dao.model.ResultMappingDiag;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingDiagWrapper;
-import org.diagbot.pub.orm.EntityMapper;
-
-/**
-* @Title: Feature.java
-* @Package: com.zjlantone.nlp.web.doc.dao.model
-* @Description: 数据库操作接口类 
-* @author: 楼辉荣
-* @date: 2016年8月8日 下午17:16:23
-* @version: V1.0
-*/
-public interface ResultMappingDiagMapper extends EntityMapper<ResultMappingDiag, ResultMappingDiagWrapper, Long> {
-}

+ 0 - 22
bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingFilterMapper.java

@@ -1,22 +0,0 @@
-/** 
-* @Company: 杭州朗通信息技术有限公司
-* @Department: 医疗事业部
-* @Description: 互动反馈系统 
-* @Address: 浙江省杭州市余杭区向往街1008号乐富海邦园11幢4楼
-*/
-package org.diagbot.bigdata.dao.mapper;
-
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingFilterWrapper;
-import org.diagbot.pub.orm.EntityMapper;
-
-/**
-* @Title: Feature.java
-* @Package: com.zjlantone.nlp.web.doc.dao.model
-* @Description: 数据库操作接口类 
-* @author: 楼辉荣
-* @date: 2016年8月8日 下午17:16:23
-* @version: V1.0
-*/
-public interface ResultMappingFilterMapper extends EntityMapper<ResultMappingFilter, ResultMappingFilterWrapper, Long> {
-}

+ 0 - 22
bigdata-web/src/main/java/org/diagbot/bigdata/dao/mapper/ResultMappingVitalMapper.java

@@ -1,22 +0,0 @@
-/** 
-* @Company: 杭州朗通信息技术有限公司
-* @Department: 医疗事业部
-* @Description: 互动反馈系统 
-* @Address: 浙江省杭州市余杭区向往街1008号乐富海邦园11幢4楼
-*/
-package org.diagbot.bigdata.dao.mapper;
-
-import org.diagbot.bigdata.dao.model.ResultMappingVital;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingVitalWrapper;
-import org.diagbot.pub.orm.EntityMapper;
-
-/**
-* @Title: Feature.java
-* @Package: com.zjlantone.nlp.web.doc.dao.model
-* @Description: 数据库操作接口类 
-* @author: 楼辉荣
-* @date: 2016年8月8日 下午17:16:23
-* @version: V1.0
-*/
-public interface ResultMappingVitalMapper extends EntityMapper<ResultMappingVital, ResultMappingVitalWrapper, Long> {
-}

+ 0 - 38
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingDiag.java

@@ -1,38 +0,0 @@
-package org.diagbot.bigdata.dao.model;
-
-import java.io.Serializable;
-
-/**
- * @Auther: fyeman
- * @Date: 2018/9/12/012 16:50
- * @Description:
- */
-public class ResultMappingDiag implements Serializable {
-    private Long id;
-    private String diagName;
-    private String deptName;
-
-    public Long getId() {
-        return id;
-    }
-
-    public void setId(Long id) {
-        this.id = id;
-    }
-
-    public String getDiagName() {
-        return diagName;
-    }
-
-    public void setDiagName(String diagName) {
-        this.diagName = diagName;
-    }
-
-    public String getDeptName() {
-        return deptName;
-    }
-
-    public void setDeptName(String deptName) {
-        this.deptName = deptName;
-    }
-}

+ 0 - 68
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingFilter.java

@@ -1,68 +0,0 @@
-package org.diagbot.bigdata.dao.model;
-
-
-public class ResultMappingFilter {
-    private Long id;
-    private String featureName;
-    private String featureType;
-    private String sex;
-    private int ageStart;
-    private int ageEnd;
-    private String remark;
-
-    public Long getId() {
-        return id;
-    }
-
-    public void setId(Long id) {
-        this.id = id;
-    }
-
-    public String getFeatureName() {
-        return featureName;
-    }
-
-    public void setFeatureName(String featureName) {
-        this.featureName = featureName;
-    }
-
-    public String getFeatureType() {
-        return featureType;
-    }
-
-    public void setFeatureType(String featureType) {
-        this.featureType = featureType;
-    }
-
-    public String getSex() {
-        return sex;
-    }
-
-    public void setSex(String sex) {
-        this.sex = sex;
-    }
-
-    public int getAgeStart() {
-        return ageStart;
-    }
-
-    public void setAgeStart(int ageStart) {
-        this.ageStart = ageStart;
-    }
-
-    public int getAgeEnd() {
-        return ageEnd;
-    }
-
-    public void setAgeEnd(int ageEnd) {
-        this.ageEnd = ageEnd;
-    }
-
-    public String getRemark() {
-        return remark;
-    }
-
-    public void setRemark(String remark) {
-        this.remark = remark;
-    }
-}

+ 0 - 38
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/ResultMappingVital.java

@@ -1,38 +0,0 @@
-package org.diagbot.bigdata.dao.model;
-
-import java.io.Serializable;
-
-/**
- * @Auther: fyeman
- * @Date: 2018/9/10/010 14:30
- * @Description:
- */
-public class ResultMappingVital implements Serializable {
-    private Long id;
-    private String name;
-    private String nameMapping;
-
-    public Long getId() {
-        return id;
-    }
-
-    public void setId(Long id) {
-        this.id = id;
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public void setName(String name) {
-        this.name = name;
-    }
-
-    public String getNameMapping() {
-        return nameMapping;
-    }
-
-    public void setNameMapping(String nameMapping) {
-        this.nameMapping = nameMapping;
-    }
-}

+ 0 - 12
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingDiagWrapper.java

@@ -1,12 +0,0 @@
-package org.diagbot.bigdata.dao.model.wrapper;
-
-
-import org.diagbot.bigdata.dao.model.ResultMappingDiag;
-
-/**
- * @Auther: fyeman
- * @Date: 2018/9/12/012 16:51
- * @Description:
- */
-public class ResultMappingDiagWrapper extends ResultMappingDiag {
-}

+ 0 - 6
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingFilterWrapper.java

@@ -1,6 +0,0 @@
-package org.diagbot.bigdata.dao.model.wrapper;
-
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-
-public class ResultMappingFilterWrapper extends ResultMappingFilter {
-}

+ 0 - 7
bigdata-web/src/main/java/org/diagbot/bigdata/dao/model/wrapper/ResultMappingVitalWrapper.java

@@ -1,7 +0,0 @@
-package org.diagbot.bigdata.dao.model.wrapper;
-
-
-import org.diagbot.bigdata.dao.model.ResultMappingVital;
-
-public class ResultMappingVitalWrapper extends ResultMappingVital {
-}

+ 0 - 55
bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingDiagMapper.xml

@@ -1,55 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
-        "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
-<mapper namespace="org.diagbot.bigdata.dao.mapper.ResultMappingDiagMapper">
-    <!-- 映射定义列-->
-    <resultMap type="org.diagbot.bigdata.dao.model.ResultMappingDiag" id="resultMappingDiagMap">
-        <id property="id" column="id"/>
-        <result property="diagName" column="diag_name"/>
-        <result property="deptName" column="dept_name"/>
-    </resultMap>
-
-    <!-- 映射定义列-->
-    <resultMap type="org.diagbot.bigdata.dao.model.wrapper.ResultMappingDiagWrapper" id="resultMappingDiagWrapperMap">
-        <id property="id" column="id"/>
-        <result property="diagName" column="diag_name"/>
-        <result property="deptName" column="dept_name"/>
-    </resultMap>
-
-    <!-- 通用查询结果列-->
-    <sql id="Base_Column_List">
-		 t.id,	 t.diag_name,	 t.dept_name
-	</sql>
-
-    <!-- 查询(根据主键ID查询) -->
-    <select id="selectByPrimaryKey" resultMap="resultMappingDiagMap" parameterType="java.lang.Integer">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_diag t
-        WHERE t.id = #{id}
-    </select>
-
-    <!-- 查询(根据主键ID查询) -->
-    <select id="selectWrapperByPrimaryKey" resultMap="resultMappingDiagWrapperMap" parameterType="java.lang.Integer">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_diag t
-        WHERE t.id = #{id}
-    </select>
-
-    <!-- 依据Map查询条件返回结果集-->
-    <select id="selectList" resultMap="resultMappingDiagMap" parameterType="java.util.Map">
-        select diag_name, group_concat(dept_name) dept_name from doc_result_mapping_diag  group by diag_name
-    </select>
-
-    <!-- 依据Map查询条件返回扩展属性结果集-->
-    <select id="selectListWrapper" resultMap="resultMappingDiagWrapperMap" parameterType="java.util.Map">
-        select diag_name, group_concat(dept_name) dept_name from doc_result_mapping_diag  group by diag_name
-    </select>
-
-    <!--删除:根据主键ID删除-->
-    <delete id="deleteByPrimaryKey" parameterType="java.lang.Integer">
-		 DELETE FROM doc_result_mapping_diag
-		 WHERE id = #{id}
-	</delete>
-</mapper>

+ 0 - 67
bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingFilterMapper.xml

@@ -1,67 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
-        "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
-<mapper namespace="org.diagbot.bigdata.dao.mapper.ResultMappingFilterMapper">
-    <!-- 映射定义列-->
-    <resultMap type="org.diagbot.bigdata.dao.model.ResultMappingFilter" id="resultMappingFilterMap">
-        <id property="id" column="id"/>
-        <result property="featureName" column="feature_name"/>
-        <result property="featureType" column="feature_type"/>
-        <result property="sex" column="sex"/>
-        <result property="ageStart" column="age_start"/>
-        <result property="ageEnd" column="age_end"/>
-        <result property="remark" column="remark"/>
-    </resultMap>
-
-    <!-- 映射定义列-->
-    <resultMap type="org.diagbot.bigdata.dao.model.wrapper.ResultMappingFilterWrapper" id="resultMappingFilterWrapperMap">
-        <id property="id" column="id"/>
-        <result property="featureName" column="feature_name"/>
-        <result property="featureType" column="feature_type"/>
-        <result property="sex" column="sex"/>
-        <result property="ageStart" column="age_start"/>
-        <result property="ageEnd" column="age_end"/>
-        <result property="remark" column="remark"/>
-    </resultMap>
-
-    <!-- 通用查询结果列-->
-    <sql id="Base_Column_List">
-		 t.id, t.feature_name, t.feature_type, t.sex, t.age_start, t.age_end, t.remark
-	</sql>
-
-    <!-- 查询(根据主键ID查询) -->
-    <select id="selectByPrimaryKey" resultMap="resultMappingFilterMap" parameterType="java.lang.Integer">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_filter t
-        WHERE t.id = #{id}
-    </select>
-
-    <!-- 查询(根据主键ID查询) -->
-    <select id="selectWrapperByPrimaryKey" resultMap="resultMappingFilterWrapperMap" parameterType="java.lang.Integer">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_filter t
-        WHERE t.id = #{id}
-    </select>
-
-    <!-- 依据Map查询条件返回结果集-->
-    <select id="selectList" resultMap="resultMappingFilterMap" parameterType="java.util.Map">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_filter t WHERE 1=1
-    </select>
-
-    <!-- 依据Map查询条件返回扩展属性结果集-->
-    <select id="selectListWrapper" resultMap="resultMappingFilterWrapperMap" parameterType="java.util.Map">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_filter t WHERE 1=1
-    </select>
-
-    <!--删除:根据主键ID删除-->
-    <delete id="deleteByPrimaryKey" parameterType="java.lang.Integer">
-		 DELETE FROM doc_result_mapping_filter
-		 WHERE id = #{id}
-	</delete>
-</mapper>

+ 0 - 77
bigdata-web/src/main/java/org/diagbot/bigdata/dao/xml/ResultMappingVitalMapper.xml

@@ -1,77 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
-        "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
-<mapper namespace="org.diagbot.bigdata.dao.mapper.ResultMappingVitalMapper">
-    <!-- 映射定义列-->
-    <resultMap type="org.diagbot.bigdata.dao.model.ResultMappingVital" id="resultMappingVitalMap">
-        <id property="id" column="id"/>
-        <result property="name" column="name"/>
-        <result property="nameMapping" column="name_mapping"/>
-    </resultMap>
-
-    <!-- 映射定义列-->
-    <resultMap type="org.diagbot.bigdata.dao.model.wrapper.ResultMappingVitalWrapper" id="resultMappingVitalWrapperMap">
-        <id property="id" column="id"/>
-        <result property="name" column="name"/>
-        <result property="nameMapping" column="name_mapping"/>
-    </resultMap>
-
-    <!-- 通用查询结果列-->
-    <sql id="Base_Column_List">
-		 t.id,	 t.name,	 t.name_mapping
-	</sql>
-
-    <!-- 查询(根据主键ID查询) -->
-    <select id="selectByPrimaryKey" resultMap="resultMappingVitalMap" parameterType="java.lang.Integer">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_vital t
-        WHERE t.id = #{id}
-    </select>
-
-    <!-- 查询(根据主键ID查询) -->
-    <select id="selectWrapperByPrimaryKey" resultMap="resultMappingVitalWrapperMap" parameterType="java.lang.Integer">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_vital t
-        WHERE t.id = #{id}
-    </select>
-
-    <!-- 依据Map查询条件返回结果集-->
-    <select id="selectList" resultMap="resultMappingVitalMap" parameterType="java.util.Map">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_vital t WHERE 1=1
-        <if test="id != null and id != ''">
-            and t.id = #{id}
-        </if>
-        <if test="name != null and name != ''">
-            and t.name = #{name}
-        </if>
-        <if test="nameMapping != null and nameMapping != ''">
-            and t.name_mapping = #{nameMapping}
-        </if>
-    </select>
-
-    <!-- 依据Map查询条件返回扩展属性结果集-->
-    <select id="selectListWrapper" resultMap="resultMappingVitalWrapperMap" parameterType="java.util.Map">
-        SELECT
-        <include refid="Base_Column_List"/>
-        FROM doc_result_mapping_vital t WHERE 1=1
-        <if test="id != null and id != ''">
-            and t.id = #{id}
-        </if>
-        <if test="name != null and name != ''">
-            and t.name = #{name}
-        </if>
-        <if test="nameMapping != null and nameMapping != ''">
-            and t.name_mapping = #{nameMapping}
-        </if>
-    </select>
-
-    <!--删除:根据主键ID删除-->
-    <delete id="deleteByPrimaryKey" parameterType="java.lang.Integer">
-		 DELETE FROM doc_result_mapping_vital
-		 WHERE id = #{id}
-	</delete>
-</mapper>

+ 0 - 8
bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingDiagService.java

@@ -1,8 +0,0 @@
-package org.diagbot.bigdata.service;
-
-import org.diagbot.bigdata.dao.model.ResultMappingDiag;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingDiagWrapper;
-import org.diagbot.pub.service.BaseService;
-
-public interface ResultMappingDiagService extends BaseService<ResultMappingDiag, ResultMappingDiagWrapper, Long> {
-}

+ 0 - 8
bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingFilterService.java

@@ -1,8 +0,0 @@
-package org.diagbot.bigdata.service;
-
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingFilterWrapper;
-import org.diagbot.pub.service.BaseService;
-
-public interface ResultMappingFilterService extends BaseService<ResultMappingFilter, ResultMappingFilterWrapper, Long> {
-}

+ 0 - 8
bigdata-web/src/main/java/org/diagbot/bigdata/service/ResultMappingVitalService.java

@@ -1,8 +0,0 @@
-package org.diagbot.bigdata.service;
-
-import org.diagbot.bigdata.dao.model.ResultMappingVital;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingVitalWrapper;
-import org.diagbot.pub.service.BaseService;
-
-public interface ResultMappingVitalService extends BaseService<ResultMappingVital, ResultMappingVitalWrapper, Long> {
-}

+ 0 - 21
bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingDiagServiceImpl.java

@@ -1,21 +0,0 @@
-package org.diagbot.bigdata.service.impl;
-
-import org.diagbot.bigdata.dao.mapper.ResultMappingDiagMapper;
-import org.diagbot.bigdata.dao.model.ResultMappingDiag;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingDiagWrapper;
-import org.diagbot.bigdata.service.ResultMappingDiagService;
-import org.diagbot.pub.service.BaseServiceImpl;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-@Service
-public class ResultMappingDiagServiceImpl extends BaseServiceImpl<ResultMappingDiag, ResultMappingDiagWrapper, Long> implements ResultMappingDiagService {
-    @Autowired
-    ResultMappingDiagMapper resultMappingDiagMapper;
-
-    @Autowired
-    private void setEntityMapper() {
-        super.setEntityMapper(resultMappingDiagMapper);
-    }
-
-}

+ 0 - 21
bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingFilterServiceImpl.java

@@ -1,21 +0,0 @@
-package org.diagbot.bigdata.service.impl;
-
-import org.diagbot.bigdata.dao.mapper.ResultMappingFilterMapper;
-import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingFilterWrapper;
-import org.diagbot.bigdata.service.ResultMappingFilterService;
-import org.diagbot.pub.service.BaseServiceImpl;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-@Service
-public class ResultMappingFilterServiceImpl extends BaseServiceImpl<ResultMappingFilter, ResultMappingFilterWrapper, Long> implements ResultMappingFilterService {
-    @Autowired
-    ResultMappingFilterMapper resultMappingFilterMapper;
-
-    @Autowired
-    private void setEntityMapper() {
-        super.setEntityMapper(resultMappingFilterMapper);
-    }
-
-}

+ 0 - 21
bigdata-web/src/main/java/org/diagbot/bigdata/service/impl/ResultMappingVitalServiceImpl.java

@@ -1,21 +0,0 @@
-package org.diagbot.bigdata.service.impl;
-
-import org.diagbot.bigdata.dao.mapper.ResultMappingVitalMapper;
-import org.diagbot.bigdata.dao.model.ResultMappingVital;
-import org.diagbot.bigdata.dao.model.wrapper.ResultMappingVitalWrapper;
-import org.diagbot.bigdata.service.ResultMappingVitalService;
-import org.diagbot.pub.service.BaseServiceImpl;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-@Service
-public class ResultMappingVitalServiceImpl extends BaseServiceImpl<ResultMappingVital, ResultMappingVitalWrapper, Long> implements ResultMappingVitalService {
-    @Autowired
-    ResultMappingVitalMapper resultMappingVitalMapper;
-
-    @Autowired
-    private void setEntityMapper() {
-        super.setEntityMapper(resultMappingVitalMapper);
-    }
-
-}

+ 5 - 7
bigdata-web/src/main/java/org/diagbot/bigdata/work/AlgorithmCore.java

@@ -3,9 +3,9 @@ package org.diagbot.bigdata.work;
 import org.algorithm.core.AlgorithmExecutor;
 import org.algorithm.factory.AlgorithmFactory;
 import org.algorithm.util.AlgorithmClassify;
-import org.diagbot.common.work.FeatureRate;
-import org.diagbot.common.work.ResponseData;
-import org.diagbot.common.work.SearchData;
+import org.diagbot.common.push.bean.FeatureRate;
+import org.diagbot.common.push.bean.ResponseData;
+import org.diagbot.common.push.bean.SearchData;
 import org.diagbot.nlp.feature.FeatureType;
 import org.diagbot.nlp.util.Constants;
 import org.diagbot.nlp.util.NlpCache;
@@ -27,9 +27,9 @@ public class AlgorithmCore {
     Logger logger = LoggerFactory.getLogger(AlgorithmCore.class);
     public ResponseData algorithm(HttpServletRequest request, SearchData searchData, ResponseData responseData) throws Exception {
         //录入文本处理,包括提取特征、推送类型转换等
-        ParamsDataProxy paramsDataProxy = new ParamsDataProxy();
+        BigDataParamsProxy paramsDataProxy = new BigDataParamsProxy();
         logger.info("页面文本信息:" + searchData.getSymptom());
-        paramsDataProxy.createSearchData(request, searchData);
+        paramsDataProxy.createSearchData(searchData);
         //对象拷贝至BigDataSearchData处理
         BigDataSearchData bigDataSearchData = new BigDataSearchData();
         BeanUtils.copyProperties(searchData, bigDataSearchData);
@@ -62,8 +62,6 @@ public class AlgorithmCore {
                 if (Constants.feature_type_symptom.equals(searchData.getFeatureTypes()[i])) {
                     featuresMap = resultDataProxy.mapAdd(featuresMap, NlpCache.getStandard_info_push_map(), true);
                 }
-                //大小类合并
-                featuresMap = resultDataProxy.resultMerge(request, featuresMap);
                 //按模型计算的概率排序
                 featuresOrderList = new ArrayList<Map.Entry<String, Float>>(featuresMap.entrySet());
                 Collections.sort(featuresOrderList, new Comparator<Map.Entry<String, Float>>() {

+ 94 - 0
bigdata-web/src/main/java/org/diagbot/bigdata/work/BigDataParamsProxy.java

@@ -0,0 +1,94 @@
+package org.diagbot.bigdata.work;
+
+import org.algorithm.util.AlgorithmClassify;
+import org.apache.commons.lang3.StringUtils;
+import org.diagbot.common.push.bean.SearchData;
+import org.diagbot.common.push.work.ParamsDataProxy;
+import org.diagbot.nlp.feature.FeatureType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @ClassName org.diagbot.bigdata.work.ParamsDataProxy
+ * @Description TODO
+ * @Author fyeman
+ * @Date 2019/1/16/016 14:04
+ * @Version 1.0
+ **/
+public class BigDataParamsProxy {
+    Logger logger = LoggerFactory.getLogger(BigDataParamsProxy.class);
+
+    public void createSearchData(SearchData searchData) throws Exception {
+        ParamsDataProxy paramsDataProxy = new ParamsDataProxy();
+        paramsDataProxy.createSearchData(searchData);
+    }
+    /**
+     * featureType转算法模型类型
+     *
+     * @param sysCode
+     * @param featureTypes
+     * @param searchData
+     */
+    public AlgorithmClassify[] createAlgorithmClassify(String sysCode, String[] featureTypes, SearchData searchData) {
+        AlgorithmClassify[] classifies = new AlgorithmClassify[featureTypes.length];
+        //下了诊断且其他信息全为空 反推标识
+        boolean reverse = !StringUtils.isEmpty(searchData.getDiag()) && StringUtils.isEmpty(searchData.getSymptom());
+        for (int i = 0; i < featureTypes.length; i++) {
+            if (featureTypes[i] != null) {
+                //模型
+                switch (FeatureType.parse(featureTypes[i])) {
+                    case SYMPTOM:
+                        if (reverse) {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_SYMPTOM;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_SYMPTOM;
+                        }
+                        break;
+                    case DIAG:
+                        if (reverse) {
+                            classifies[i] = null;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG;
+                        }
+                        break;
+                    case VITAL:
+                        if (reverse) {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_VITAL;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_VITAL;
+                        }
+                        break;
+                    case LIS:
+                        if (reverse) {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_LIS;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_LIS;
+                        }
+                        break;
+                    case PACS:
+                        if (reverse) {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_PACS;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_PACS;
+                        }
+                        break;
+                    case TREAT:
+                        if (reverse) {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_TREAT;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_TREAT;
+                        }
+                        break;
+                    case HISTORY:
+                        if (reverse) {
+                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_HISTORY;
+                        } else {
+                            classifies[i] = AlgorithmClassify.NEURAL_HISTORY;
+                        }
+                        break;
+                }
+            }
+        }
+        return classifies;
+    }
+}

+ 1 - 1
bigdata-web/src/main/java/org/diagbot/bigdata/work/BigDataSearchData.java

@@ -1,7 +1,7 @@
 package org.diagbot.bigdata.work;
 
 import org.algorithm.util.AlgorithmClassify;
-import org.diagbot.common.work.SearchData;
+import org.diagbot.common.push.bean.SearchData;
 
 public class BigDataSearchData extends SearchData {
     //模型

+ 0 - 702
bigdata-web/src/main/java/org/diagbot/bigdata/work/ParamsDataProxy.java

@@ -1,702 +0,0 @@
-package org.diagbot.bigdata.work;
-
-import org.algorithm.util.AlgorithmClassify;
-import org.apache.commons.lang3.StringUtils;
-import org.diagbot.bigdata.common.ApplicationCacheUtil;
-import org.diagbot.bigdata.util.BigDataConstants;
-import org.diagbot.common.work.SearchData;
-import org.diagbot.nlp.feature.FeatureAnalyze;
-import org.diagbot.nlp.feature.FeatureType;
-import org.diagbot.nlp.participle.ParticipleUtil;
-import org.diagbot.nlp.participle.word.Lexeme;
-import org.diagbot.nlp.participle.word.LexemePath;
-import org.diagbot.nlp.util.Constants;
-import org.diagbot.nlp.util.NegativeEnum;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.servlet.http.HttpServletRequest;
-import java.util.*;
-import java.util.regex.Pattern;
-
-/**
- * @ClassName org.diagbot.bigdata.work.ParamsDataProxy
- * @Description TODO
- * @Author fyeman
- * @Date 2019/1/16/016 14:04
- * @Version 1.0
- **/
-public class ParamsDataProxy {
-    Logger logger = LoggerFactory.getLogger(ParamsDataProxy.class);
-    //标准词只处理的词性
-    public static NegativeEnum[] negativeEnums = new NegativeEnum[] { NegativeEnum.VITAL_INDEX, NegativeEnum.SYMPTOM
-            , NegativeEnum.DIGITS, NegativeEnum.EVENT_TIME, NegativeEnum.UNIT, NegativeEnum.DIAG_STAND
-            , NegativeEnum.OTHER};
-    //标准词处理的三元组
-    public static NegativeEnum[][] negativeEnumTriple = {
-            { NegativeEnum.VITAL_INDEX, NegativeEnum.DIGITS, NegativeEnum.UNIT },
-            { NegativeEnum.VITAL_INDEX, NegativeEnum.DIGITS, NegativeEnum.EVENT_TIME },
-            { NegativeEnum.SYMPTOM, NegativeEnum.DIGITS, NegativeEnum.UNIT },
-            { NegativeEnum.SYMPTOM, NegativeEnum.DIGITS, NegativeEnum.EVENT_TIME },
-            { NegativeEnum.DIAG_STAND, NegativeEnum.DIGITS, NegativeEnum.UNIT },
-            { NegativeEnum.DIAG_STAND, NegativeEnum.DIGITS, NegativeEnum.EVENT_TIME },
-            { NegativeEnum.DIAG_STAND, NegativeEnum.DIGITS, NegativeEnum.OTHER }
-    };
-    //标准词处理的二元组
-    public static NegativeEnum[][] negativeEnumTwoTuple = {
-            { NegativeEnum.VITAL_INDEX, NegativeEnum.DIGITS },
-            { NegativeEnum.SYMPTOM, NegativeEnum.DIGITS }
-    };
-
-    public void createSearchData(HttpServletRequest request, SearchData searchData) throws Exception {
-        //消除空格
-        if (searchData.getSymptom() != null) {
-            searchData.setSymptom(searchData.getSymptom().trim());
-        }
-        if (searchData.getDiag() != null) {
-            searchData.setDiag(searchData.getDiag().trim());
-        }
-        //计算年龄区间
-        if (searchData.getAge() > 0) {
-            searchData.setAge_start(searchData.getAge() - 5);
-            searchData.setAge_end(searchData.getAge() + 5);
-        }
-        //修改性别代码
-        if (!StringUtils.isEmpty(searchData.getSex())) {
-            if ("M".equals(searchData.getSex())) {
-                searchData.setSex("1");
-            } else if ("F".equals(searchData.getSex())) {
-                searchData.setSex("2");
-            } else {
-                searchData.setSex("3");
-            }
-        } else {
-            searchData.setSex("3");
-        }
-        //默认查询门诊数据
-        if (StringUtils.isEmpty(searchData.getResourceType())) {
-            searchData.setResourceType(BigDataConstants.resource_type_o);
-        }
-//        //给症状末尾添加诊断依据标准词
-//        String[] items = { searchData.getSymptom(), searchData.getOther(), searchData.getVital()
-//                , searchData.getLis(), searchData.getPacs(), searchData.getDiag() };
-//        String[] itemsType = { "symptom", "other", "vital", "lis", "pacs", "diag" };
-//        for (int i = 0; i < items.length; i++) {
-//            if (items[i] != null) {
-//                LexemePath<Lexeme> featureData = ParticipleUtil.participle(items[i]);
-//                if (featureData != null) {
-//                    addStandWord(featureData, ApplicationCacheUtil.getKl_result_mapping_standword_map(), searchData, itemsType[i]);
-//                }
-//            }
-//        }
-        //所有信息参与推送
-        //        searchData.setSymptom(searchData.getSymptom() + searchData.getVital()
-        //                + searchData.getLis() + searchData.getPacs() + searchData.getPast() + searchData.getOther() + searchData.getIndications());
-        if (StringUtils.isNotEmpty(searchData.getSymptom())) {
-            searchData.setSymptom(searchData.getSymptom().trim());
-        }
-        //一次推送多个类别信息
-        String[] featureTypes = searchData.getFeatureType().split(",");
-        //featureType统一转换
-        String[] convertFeatureTypes = new String[featureTypes.length];
-        for (int i = 0; i < featureTypes.length; i++) {
-            convertFeatureTypes[i] = convertFeatureType(searchData.getSysCode(), featureTypes[i]);
-        }
-        searchData.setFeatureType(StringUtils.join(convertFeatureTypes, ","));
-        searchData.setFeatureTypes(convertFeatureTypes);
-
-        //获取入参中的特征信息
-        FeatureAnalyze fa = new FeatureAnalyze();
-        List<Map<String, Object>> featuresList = new ArrayList<>();
-        if (!StringUtils.isEmpty(searchData.getSymptom())) {
-            //提取现病史
-            featuresList = fa.start(searchData.getSymptom(), FeatureType.FEATURE);
-            paramFeatureInit(searchData, featuresList);
-            //提取时间信息
-            featuresList = fa.start(searchData.getSymptom(), FeatureType.TIME);
-            paramFeatureInit(searchData, featuresList);
-        }
-        if (!StringUtils.isEmpty(searchData.getVital())) {
-            //提取体征
-            featuresList = fa.start(searchData.getVital(), FeatureType.FEATURE);
-            paramFeatureInit(searchData, featuresList);
-        }
-        if (!StringUtils.isEmpty(searchData.getPast())) {
-            //提取既往史
-            featuresList = fa.start(searchData.getPast(), FeatureType.FEATURE);
-            paramFeatureInit(searchData, featuresList);
-        }
-        if (!StringUtils.isEmpty(searchData.getOther()) || !StringUtils.isEmpty(searchData.getIndications())) {
-            //提取其他史等
-            featuresList = fa.start((searchData.getOther() == null ? "" : searchData.getOther()) + (searchData.getIndications() == null ? "" : searchData.getIndications()), FeatureType.FEATURE);
-            paramFeatureInit(searchData, featuresList);
-        }
-        if (!StringUtils.isEmpty(searchData.getPacs())) {
-            featuresList = fa.start(searchData.getPacs(), FeatureType.PACS);
-            paramFeatureInit(searchData, featuresList);
-        }
-        if (!StringUtils.isEmpty(searchData.getLis())) {
-            featuresList = fa.start(searchData.getLis(), FeatureType.LIS);
-            paramFeatureInit(searchData, featuresList);
-        }
-        // 清洗特征词,去除词性不匹配的词
-        searchData = cleanFeature(featuresList, fa, searchData);
-        if (!StringUtils.isEmpty(searchData.getOther())) {
-            //如果既往史中诊断信息,需要提取这个特征
-            featuresList = fa.start(searchData.getOther(), FeatureType.DIAG);
-            paramFeatureInit(searchData, featuresList);
-        }
-
-        if (!StringUtils.isEmpty(searchData.getDiag()) && StringUtils.isEmpty(searchData.getSymptom())) {
-            featuresList = fa.start(searchData.getDiag(), FeatureType.DIAG);
-            paramFeatureInit(searchData, featuresList);
-        }
-    }
-
-    /**
-     * featureType转算法模型类型
-     *
-     * @param sysCode
-     * @param featureTypes
-     * @param searchData
-     */
-    public AlgorithmClassify[] createAlgorithmClassify(String sysCode, String[] featureTypes, SearchData searchData) {
-        AlgorithmClassify[] classifies = new AlgorithmClassify[featureTypes.length];
-        //下了诊断且其他信息全为空 反推标识
-        boolean reverse = !StringUtils.isEmpty(searchData.getDiag()) && StringUtils.isEmpty(searchData.getSymptom());
-        for (int i = 0; i < featureTypes.length; i++) {
-            //            featureTypes[i] = convertFeatureType(sysCode, featureTypes[i]);
-            if (featureTypes[i] != null) {
-                //模型
-                switch (FeatureType.parse(featureTypes[i])) {
-                    case SYMPTOM:
-                        if (reverse) {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_SYMPTOM;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_SYMPTOM;
-                        }
-                        break;
-                    case DIAG:
-                        if (reverse) {
-                            classifies[i] = null;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG;
-                        }
-                        break;
-                    case VITAL:
-                        if (reverse) {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_VITAL;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_VITAL;
-                        }
-                        break;
-                    case LIS:
-                        if (reverse) {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_LIS;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_LIS;
-                        }
-                        break;
-                    case PACS:
-                        if (reverse) {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_PACS;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_PACS;
-                        }
-                        break;
-                    case TREAT:
-                        if (reverse) {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_TREAT;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_TREAT;
-                        }
-                        break;
-                    case HISTORY:
-                        if (reverse) {
-                            classifies[i] = AlgorithmClassify.NEURAL_DIAG_HISTORY;
-                        } else {
-                            classifies[i] = AlgorithmClassify.NEURAL_HISTORY;
-                        }
-                        break;
-                }
-            }
-        }
-        return classifies;
-    }
-
-    /**
-     * 外部系统featureType需要转化为大数据定义的featureType
-     *
-     * @param sysCode
-     * @param featureType
-     * @return
-     */
-    private String convertFeatureType(String sysCode, String featureType) {
-        if (StringUtils.isEmpty(sysCode) || sysCode.equals("1")) {
-            if ("1".equals(featureType)) {
-                return BigDataConstants.feature_type_symptom;
-            }
-            if ("7".equals(featureType)) {
-                return BigDataConstants.feature_type_diag;
-            }
-            if ("4".equals(featureType)) {
-                return BigDataConstants.feature_type_vital;
-            }
-            if ("5".equals(featureType)) {
-                return BigDataConstants.feature_type_lis;
-            }
-            if ("6".equals(featureType)) {
-                return BigDataConstants.feature_type_pacs;
-            }
-            if ("3".equals(featureType)) {
-                return BigDataConstants.feature_type_history;
-            }
-            if ("8".equals(featureType)) {
-                return BigDataConstants.feature_type_treat;
-            }
-            if ("22".equals(featureType)) {
-                return BigDataConstants.feature_type_labelpush;
-            }
-            if ("11".equals(featureType)) {
-                return BigDataConstants.feature_type_manju;
-            }
-            if ("42".equals(featureType)) {
-                return BigDataConstants.feature_type_vital_index;
-            }
-            return null;
-        }
-        return featureType;
-    }
-
-    /**
-     * 推送模型入参
-     *
-     * @param searchData
-     * @throws Exception
-     */
-    private void paramFeatureInit(SearchData searchData, List<Map<String, Object>> featuresList) throws Exception {
-        if (featuresList != null && featuresList.size() > 0) {
-            Map<String, Object> featureMap = null;
-            for (int i = 0; i < featuresList.size(); i++) {
-                featureMap = featuresList.get(i);
-                Map<String, String> map = new HashMap<>();
-                for (Map.Entry<String, Object> entry : featureMap.entrySet()) {
-                    map.put(entry.getKey(), String.valueOf(entry.getValue()));
-                }
-                map.put("featureType", String.valueOf(featureMap.get("feature_type")));
-                map.put("featureName", String.valueOf(featureMap.get("feature_name")));
-                map.put("property", String.valueOf(featureMap.get("property")));
-                map.put("concept", String.valueOf(featureMap.get("concept")));
-                if (Constants.default_negative.equals(featureMap.get("negative"))) {
-                    if (map.get("featureType").equals(Constants.feature_type_time)) {
-                        searchData.getInputs().put("时间", map);
-                    } else {
-                        if (searchData.getInputs().get(map.get("feature_name")) == null) {
-                            if (i < 5) {
-                                searchData.getInputs().put(map.get("feature_name"), map);
-                            }
-                            searchData.getGraphInputs().put(map.get("feature_name"), map);
-                        }
-                    }
-                } else {
-                    searchData.getFilters().put(map.get("feature_name"), map);
-                }
-            }
-        }
-    }
-
-    /**
-     * 给SearchData中症状末尾添加诊断依据标准词
-     *
-     * @param lexemes
-     * @param standWords
-     * @param sData
-     * @return
-     */
-    public SearchData addStandWord(List<Lexeme> lexemes, Map<String, List<Map<String, String>>> standWords, SearchData sData, String itemType) {
-        List<Lexeme> feature = new ArrayList<>();
-
-        //收集分词结果中体征指标或体征指标值(数字)
-        for (Lexeme lexeme : lexemes) {
-            if (lexeme.getProperty().contains(",")) {
-                ApplicationCacheUtil.setProterty(lexeme); //如果分词后词性有多个,只选一个(暂时只处理症状,体征)
-            }
-            NegativeEnum lexemeNegativeEnum = NegativeEnum.parseOfValue(lexeme.getProperty());
-            for (int i = 0; i < negativeEnums.length; i++) {
-                if (lexemeNegativeEnum == negativeEnums[i]) {
-                    feature.add(lexeme);
-                    break;
-                }
-            }
-        }
-        //根据收集到的分词结果把体征指标和对应体征指标值(数字)拼接
-        List<String> featureType = new ArrayList<>();
-
-        for (int i = 0; i < feature.size(); i++) {
-            boolean featureTypeState = true;
-            boolean featureTypeStatus = false;
-            if (i < feature.size() - 2) {
-                for (int j = 0; j < negativeEnumTriple.length; j++) {
-                    String featureText = "";
-                    for (int k = 0; k < negativeEnumTriple[j].length; k++) {
-                        if (NegativeEnum.parseOfValue(feature.get(i + k).getProperty()) == negativeEnumTriple[j][k]) {
-                            featureTypeStatus = true;
-                            featureText += "\t" + feature.get(i + k).getText();
-                        } else {
-                            featureTypeStatus = false;
-                            break;
-                        }
-                    }
-                    if (featureTypeStatus) {
-                        featureType.add(featureText);
-                        featureTypeState = false;
-                    }
-                }
-            }
-            if (featureTypeState && i < feature.size() - 1) {
-                for (int j = 0; j < negativeEnumTwoTuple.length; j++) {
-                    String featureText = "";
-                    for (int k = 0; k < negativeEnumTwoTuple[j].length; k++) {
-                        if (NegativeEnum.parseOfValue(feature.get(i + k).getProperty()) == negativeEnumTwoTuple[j][k]) {
-                            featureTypeStatus = true;
-                            featureText += "\t" + feature.get(i + k).getText();
-                        } else {
-                            featureTypeStatus = false;
-                            break;
-                        }
-                    }
-                    if (featureTypeStatus) {
-                        featureType.add(featureText);
-                    }
-                }
-            }
-        }
-        //将标准词中体征指标值(数字)与分词结果中体征指标值(数字)比较
-        String newStandWord = "";
-        for (String f : featureType) {
-            String[] features = f.trim().split("\t");
-            if (standWords.containsKey(features[0])) {
-                List<Map<String, String>> standWordList = standWords.get(features[0]);
-                for (Map<String, String> standWordMap : standWordList) {
-                    if (standWordMap.containsKey("unit") && standWordMap.containsKey("value")) {
-                        if (features.length == 2) {
-                            newStandWord = judgment(features, standWordMap, newStandWord, sData, itemType);
-                        } else {
-                            if (standWordMap.get("unit").equals(features[2].toLowerCase())) {
-                                newStandWord = judgment(features, standWordMap, newStandWord, sData, itemType);
-                            }
-                        }
-                    } else if (standWordMap.containsKey("value")) {
-                        if (features.length == 2) {
-                            newStandWord = judgment(features, standWordMap, newStandWord, sData, itemType);
-                        }
-                    }
-                }
-            }
-        }
-        //血压既满足血压≥140/90mmHg,又满足血压小于90/60mmHg时,只取前者
-        String addStandWords = "";
-        String smallerStandWord = "";
-        boolean flag = true;
-        for (String standWord : newStandWord.split(",")) {
-            if (!"".equals(standWord) || standWord.length() > 0) {
-                if (standWord.contains("<") || standWord.contains("<=") || standWord.contains("小于")) {
-                    smallerStandWord += "," + standWord;
-                } else {
-                    addStandWords += "," + proxy(standWord);
-                    flag = false;
-                }
-            }
-        }
-        if (flag) {
-            addStandWords += smallerStandWord;
-        }
-        addbloodPressure(sData, itemType, addStandWords);
-        return sData;
-    }
-
-
-    /**
-     * 将标准词中体征指标值(数字)与分词结果中体征指标值(数字)比较
-     * 除了血压>140/90mmHg类似标准词,其他标准词直接添加在症状后面
-     *
-     * @param features
-     * @param standWordMap
-     * @param standWord
-     * @param sData
-     * @return 血压>140/90mmHg或血压小于90/60mmHg或同时返回,在addStandWord()中进一步处理
-     */
-    private String judgment(String[] features, Map<String, String> standWordMap, String standWord, SearchData sData, String itemType) {
-        if (hasDigit(features[1])) {
-            try {
-                if (">".equals(standWordMap.get("op"))) {
-                    //单独处理  血压>140/90mmHg   类似情况
-                    if (features[1].contains("/")) {
-                        if (standWordMap.get("value").contains("/")) {
-                            String[] feature = features[1].split("/");
-                            Integer featuresSBP = Integer.valueOf(feature[0]); //分词特征收缩压
-                            Integer featuresDBP = Integer.valueOf(feature[1]); //分词特征舒张压
-
-                            String[] values = standWordMap.get("value").split("/");
-                            Integer standWordSBP = Integer.valueOf(values[0]); //标准词收缩压
-                            Integer standWordDBP = Integer.valueOf(values[1]); //标准词舒张压
-                            if (featuresSBP > standWordSBP || featuresDBP > standWordDBP) {
-                                standWord += "," + standWordMap.get("standword");
-                            }
-                        }
-                    } else {
-                        //"symptom","other","vital","lis","pacs","diag"
-                        String num = getNum(standWordMap.get("value"));
-                        if (Double.valueOf(getNum(features[1])) > Double.valueOf(num)) {
-                            setStandword(standWordMap, sData, itemType);
-                        }
-                    }
-                } else if ("<".equals(standWordMap.get("op"))) {
-                    //单独处理  血压小于90/60mmHg   类似情况
-                    if (standWordMap.get("value").contains("/")) {
-                        if (features[1].contains("/")) {
-                            String[] feature = features[1].split("/");
-                            Integer featuresSBP = Integer.valueOf(feature[0]); //分词特征收缩压
-                            Integer featuresDBP = Integer.valueOf(feature[1]); //分词特征舒张压
-
-                            String[] values = standWordMap.get("value").split("/");
-                            Integer standWordSBP = Integer.valueOf(values[0]); //标准词收缩压
-                            Integer standWordDBP = Integer.valueOf(values[1]); //标准词舒张压
-                            if (featuresSBP < standWordSBP || featuresDBP < standWordDBP) {
-                                standWord += "," + standWordMap.get("standword");
-                            }
-                        }
-                    } else {
-                        String num = getNum(standWordMap.get("value"));
-                        if (Double.valueOf(getNum(features[1])) < Double.valueOf(num)) {
-                            setStandword(standWordMap, sData, itemType);
-                        }
-                    }
-                } else if (">=".equals(standWordMap.get("op"))) {
-                    //单独处理  血压大于等于140/90mmHg   类似情况
-                    if (standWordMap.get("value").contains("/")) {
-                        if (features[1].contains("/")) {
-                            String[] feature = features[1].split("/");
-                            Integer featuresSBP = Integer.valueOf(feature[0]); //分词特征收缩压
-                            Integer featuresDBP = Integer.valueOf(feature[1]); //分词特征舒张压
-
-                            String[] values = standWordMap.get("value").split("/");
-                            Integer standWordSBP = Integer.valueOf(values[0]); //标准词收缩压
-                            Integer standWordDBP = Integer.valueOf(values[1]); //标准词舒张压
-                            if (featuresSBP >= standWordSBP || featuresDBP >= standWordDBP) {
-                                standWord += "," + standWordMap.get("standword");
-                            }
-                        }
-                    } else {
-                        String num = getNum(standWordMap.get("value"));
-                        if (Double.valueOf(getNum(features[1])) >= Double.valueOf(num)) {
-                            setStandword(standWordMap, sData, itemType);
-                        }
-                    }
-                } else if ("<=".equals(standWordMap.get("op"))) {
-                    //单独处理  血压小于等于90/60mmHg   类似情况
-                    if (standWordMap.get("value").contains("/")) {
-                        if (features[1].contains("/")) {
-                            String[] feature = features[1].split("/");
-                            Integer featuresSBP = Integer.valueOf(feature[0]); //分词特征收缩压
-                            Integer featuresDBP = Integer.valueOf(feature[1]); //分词特征舒张压
-
-                            String[] values = standWordMap.get("value").split("/");
-                            Integer standWordSBP = Integer.valueOf(values[0]); //标准词收缩压
-                            Integer standWordDBP = Integer.valueOf(values[1]); //标准词舒张压
-                            if (featuresSBP <= standWordSBP || featuresDBP <= standWordDBP) {
-                                standWord += "," + standWordMap.get("standword");
-                            }
-                        }
-                    } else {
-                        String num = getNum(standWordMap.get("value"));
-                        if (Double.valueOf(getNum(features[1])) <= Double.valueOf(num)) {
-                            setStandword(standWordMap, sData, itemType);
-                        }
-                    }
-                }
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-        return standWord;
-    }
-
-    /**
-     * 根据不同项目添加标准词
-     *
-     * @param standWordMap
-     * @param sData
-     * @param itemType
-     */
-    private void setStandword(Map<String, String> standWordMap, SearchData sData, String itemType) {
-        switch (itemType) {
-            case "symptom":
-                if (sData.getSymptom().indexOf(proxy(standWordMap.get("standword"))) == -1) {
-                    sData.setSymptom(sData.getSymptom() + "," + proxy(standWordMap.get("standword")));
-                }
-                break;
-            case "other":
-                if (sData.getOther().indexOf(proxy(standWordMap.get("standword"))) == -1) {
-                    sData.setOther(sData.getOther() + "," + proxy(standWordMap.get("standword")));
-                }
-                break;
-            case "vital":
-                if (sData.getVital().indexOf(proxy(standWordMap.get("standword"))) == -1) {
-                    sData.setVital(sData.getVital() + "," + proxy(standWordMap.get("standword")));
-                }
-                break;
-            case "lis":
-                if (sData.getLis().indexOf(proxy(standWordMap.get("standword"))) == -1) {
-                    sData.setLis(sData.getLis() + "," + proxy(standWordMap.get("standword")));
-                }
-                break;
-            case "pacs":
-                if (sData.getPacs().indexOf(proxy(standWordMap.get("standword"))) == -1) {
-                    sData.setPacs(sData.getPacs() + "," + proxy(standWordMap.get("standword")));
-                }
-                break;
-            case "diag":
-                if (sData.getDiag().indexOf(proxy(standWordMap.get("standword"))) == -1) {
-                    sData.setDiag(sData.getDiag() + "," + proxy(standWordMap.get("standword")));
-                }
-                break;
-        }
-    }
-
-    /**
-     * 添加血压(血压既满足血压≥140/90mmHg,又满足血压小于90/60mmHg时,只取前者)
-     *
-     * @param sData
-     * @param itemType
-     * @param addStandWords
-     */
-    private void addbloodPressure(SearchData sData, String itemType, String addStandWords) {
-        switch (itemType) {
-            case "symptom":
-                if (sData.getSymptom().indexOf(addStandWords) == -1) {
-                    sData.setSymptom(sData.getSymptom() + "," + addStandWords);
-                }
-                break;
-            case "other":
-                if (sData.getOther().indexOf(addStandWords) == -1) {
-                    sData.setOther(sData.getOther() + "," + addStandWords);
-                }
-                break;
-            case "vital":
-                if (sData.getVital().indexOf(addStandWords) == -1) {
-                    sData.setVital(sData.getVital() + "," + addStandWords);
-                }
-                break;
-            case "lis":
-                if (sData.getLis().indexOf(addStandWords) == -1) {
-                    sData.setLis(sData.getLis() + "," + addStandWords);
-                }
-                break;
-            case "pacs":
-                if (sData.getPacs().indexOf(addStandWords) == -1) {
-                    sData.setPacs(sData.getPacs() + "," + addStandWords);
-                }
-                break;
-            case "diag":
-                if (sData.getDiag().indexOf(addStandWords) == -1) {
-                    sData.setDiag(sData.getDiag() + "," + addStandWords);
-                }
-                break;
-        }
-    }
-
-    /**
-     * 判断分词后的特征中是否含有数字
-     *
-     * @param content
-     * @return
-     */
-    private boolean hasDigit(String content) {
-        boolean flag = false;
-        if (Pattern.compile(".*\\d+.*").matcher(content).matches()) {
-            flag = true;
-        }
-        return flag;
-    }
-
-    /**
-     * 将字符串中的数字提取出来,针对分词结果中"90."类似情况
-     *
-     * @param standWord
-     * @return
-     */
-    private String getNum(String standWord) {
-        StringBuffer sb = new StringBuffer();
-        for (String num : standWord.replaceAll("[^0-9]", ",").split(",")) {
-            if (num.length() > 0) {
-                sb.append(num);
-            }
-        }
-        return sb.toString();
-    }
-
-    /**
-     * 将血压超过标准值的标准词改为血压升高
-     *
-     * @param standWord
-     * @return
-     */
-    private String proxy(String standWord) {
-        if (standWord.contains("压") && (standWord.contains("≥") || standWord.contains("大于"))) {
-            standWord = "血压升高";
-        } else if (standWord.contains("心率") && (standWord.contains("大于") || standWord.contains("超过"))) {
-            standWord = "心率快";
-        }
-        return standWord;
-    }
-
-    private SearchData cleanFeature(List<Map<String, Object>> featuresList, FeatureAnalyze fa,
-                                    SearchData searchData) {
-        // 在输入的辅检文本中,只提取辅检信息
-        String[] PACS_Feature = { Constants.word_property_PACS,
-                Constants.word_property_PACS_Detail, Constants.word_property_PACS_Result };
-        searchData = removeFeature(searchData.getLis(), fa, searchData, PACS_Feature, FeatureType.PACS);
-
-        // 在输入的化验文本中,只提取化验信息
-        String[] LIS_Feature = { Constants.word_property_LIS,
-                Constants.word_property_LIS_Detail, Constants.word_property_LIS_Result };
-        searchData = removeFeature(searchData.getPacs(), fa, searchData, LIS_Feature, FeatureType.LIS);
-
-        return searchData;
-    }
-
-    private SearchData removeFeature(String text, FeatureAnalyze fa,
-                                     SearchData searchData, String[] properties, FeatureType featureType) {
-        String name = "";
-        Boolean related = false;
-
-        try {
-            List<Map<String, Object>> featureList = fa.start(text, featureType);
-            if (featureList != null) {
-                for (Map<String, Object> item : featureList) {
-                    name = item.get("feature_name").toString();
-                    String[] property = item.get("property").toString().split(",");
-                    for (String prop : property) {
-                        if (Arrays.asList(properties).contains(prop)) {
-                            //                            related = true;
-                            searchData.getInputs().remove(name);
-                            break;
-                        }
-                    }
-
-                    //                    if (!related) {
-                    //                        searchData.getInputs().remove(name);
-                    //                    }
-                    //9
-                    //                    related = false;
-                }
-            }
-
-        } catch (Exception ex) {
-            ex.printStackTrace();
-        } finally {
-            return searchData;
-        }
-    }
-}

+ 1 - 95
bigdata-web/src/main/java/org/diagbot/bigdata/work/ResultDataProxy.java

@@ -3,8 +3,7 @@ package org.diagbot.bigdata.work;
 import org.apache.commons.lang3.StringUtils;
 import org.diagbot.bigdata.common.ApplicationCacheUtil;
 import org.diagbot.bigdata.dao.model.ResultMappingFilter;
-import org.diagbot.bigdata.util.BigDataConstants;
-import org.diagbot.common.work.FeatureRate;
+import org.diagbot.common.push.bean.FeatureRate;
 import org.diagbot.nlp.feature.FeatureType;
 import org.diagbot.nlp.util.Constants;
 import org.diagbot.nlp.util.NegativeEnum;
@@ -29,7 +28,6 @@ public class ResultDataProxy {
         DecimalFormat df = new DecimalFormat("0.####");
         List<FeatureRate> featureList = new ArrayList<>(10);
 
-//        Map<String, String> resultMappingVitalMap = ApplicationCacheUtil.getDoc_result_mapping_vital_map();
         Map<String, String> resultMappingDiagMap = ApplicationCacheUtil.getDoc_result_mapping_diag_map();
         Map<String, Map<String, ResultMappingFilter>> resultMappingFilterMap = ApplicationCacheUtil.getDoc_result_mapping_filter_map();
         Map<String, Map<String, String>> synonymMap = ApplicationCacheUtil.getStandard_info_synonym_map();
@@ -67,19 +65,11 @@ public class ResultDataProxy {
                 }
             }
             featureList.add(featureRate);
-//            if (!featureType.equals(Constants.feature_type_diag)) {
                 if (cursor < searchData.getLength()) {
                     cursor++;
                 } else {
                     break;
                 }
-//            } else {            //诊断最多返回5个
-//                if (cursor < 5) {
-//                    cursor++;
-//                } else {
-//                    break;
-//                }
-//            }
         }
 
         return featureList;
@@ -149,88 +139,4 @@ public class ResultDataProxy {
         }
         return result;
     }
-
-    /**
-     * 大小类数据合并
-     *
-     * @param request
-     * @param map
-     */
-    public Map<String, Float> resultMerge(HttpServletRequest request, Map<String, Float> map) {
-        Map<String, NlpCache.Node> nodesMap = NlpCache.getStandard_info_type_tree_map();
-        Map<String, Float> resultMap = new HashMap<>();
-        //设定阀值
-        float threshold = 0.001f;
-        Map<String, Float> thresholdMap = new HashMap<>();
-        for (Map.Entry<String, Float> entry : map.entrySet()) {
-            if (!"null".equals(entry.getKey()) && entry.getValue() >= threshold) {
-                thresholdMap.put(entry.getKey(), entry.getValue());
-            }
-        }
-
-        NlpCache.Node node = null;
-        List<String> delList = new ArrayList<>();
-        for (Map.Entry<String, Float> entry : thresholdMap.entrySet()) {
-            if (delList.contains(entry.getKey())) continue;
-
-            node = nodesMap.get(entry.getKey());
-            if (node != null) {
-                String topName = node.getName();
-                NlpCache.Node p = node.getParent();
-                if (p != null && nodesMap.get(p.getName()) != null) {
-                    topName = p.getName();
-                }
-                while (p != null) {
-                    List<String> nodeNamesList = new ArrayList<>();
-                    lookChilds(topName, p, thresholdMap, nodeNamesList);
-                    if (nodeNamesList.size() > 0) {
-                        topName = p.getName();
-                    }
-                    p = p.getParent();
-                }
-
-                if (thresholdMap.get(topName) != null) {
-                    resultMap.put(topName, thresholdMap.get(topName));
-                    delList.add(topName);
-                }
-                NlpCache.Node topNode = nodesMap.get(topName);
-                lookChildsAndCal(resultMap, thresholdMap, topNode, delList, topNode.getName());
-                delList.add(topName);
-            } else {
-                resultMap.put(entry.getKey(), entry.getValue());
-            }
-        }
-        return resultMap;
-    }
-
-    private void lookChilds(String own, NlpCache.Node p, Map<String, Float> thresholdMap, List<String> nodeNamesList) {
-        for (NlpCache.Node n : p.getChilds()) {
-            if (own.equals(n.getName())) {
-                continue;
-            } else {
-                if (thresholdMap.get(n.getName()) != null) {
-                    nodeNamesList.add(n.getName());
-                }
-                if (n.getChilds().size() > 0) {
-                    lookChilds("", n, thresholdMap, nodeNamesList);
-                }
-            }
-        }
-    }
-
-    private void lookChildsAndCal(Map<String, Float> resultMap, Map<String, Float> thresholdMap, NlpCache.Node node, List<String> delList, String topName) {
-        for (NlpCache.Node n : node.getChilds()) {
-            if (thresholdMap.get(n.getName()) != null) {
-                if (resultMap.get(topName) == null) {
-                    resultMap.put(topName, thresholdMap.get(n.getName()));
-                } else {
-                    resultMap.put(topName, resultMap.get(topName) + thresholdMap.get(n.getName()));
-                }
-                delList.add(n.getName());
-            }
-            if (n.getChilds().size() > 0) {
-                lookChildsAndCal(resultMap, thresholdMap, n, delList, topName);
-            }
-        }
-    }
 }

+ 2 - 6
bigdata-web/src/test/java/org/diagbot/AddStandWordTest.java

@@ -1,7 +1,7 @@
 package org.diagbot;
 
 import org.diagbot.bigdata.common.ApplicationCacheUtil;
-import org.diagbot.bigdata.work.ParamsDataProxy;
+import org.diagbot.bigdata.work.BigDataParamsProxy;
 import org.diagbot.common.work.SearchData;
 import org.diagbot.nlp.participle.ParticipleUtil;
 import org.diagbot.nlp.participle.cfg.Configuration;
@@ -11,10 +11,6 @@ import org.diagbot.nlp.participle.word.LexemePath;
 
 import java.io.IOException;
 import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 
 /**
  * @Description:
@@ -51,7 +47,7 @@ public class AddStandWordTest {
 
 
             SearchData searchData = new SearchData();
-            ParamsDataProxy paramsDataProxy = new ParamsDataProxy();
+            BigDataParamsProxy paramsDataProxy = new BigDataParamsProxy();
             searchData.setSymptom("安静时心率98次/分");
             searchData.setOther("心率156次/分");
             searchData.setVital("男性43岁");

+ 2 - 2
bigdata-web/src/main/java/org/diagbot/bigdata/util/BigDataConstants.java

@@ -1,4 +1,4 @@
-package org.diagbot.bigdata.util;
+package org.diagbot.common.push.util;
 
 /**
  * @ClassName org.diagbot.bigdata.util.BigDataConstants
@@ -7,7 +7,7 @@ package org.diagbot.bigdata.util;
  * @Date 2019/1/16/016 14:06
  * @Version 1.0
  **/
-public class BigDataConstants {
+public class PushConstants {
     public final static String resource_type_i = "I";       //住院
     public final static String resource_type_o = "O";       //门诊
     public final static String resource_type_e = "E";       //急诊

+ 256 - 0
common-push/src/main/java/org/diagbot/common/push/work/ParamsDataProxy.java

@@ -0,0 +1,256 @@
+package org.diagbot.common.push.work;
+
+import org.apache.commons.lang3.StringUtils;
+import org.diagbot.common.push.bean.SearchData;
+import org.diagbot.common.push.util.PushConstants;
+import org.diagbot.nlp.feature.FeatureAnalyze;
+import org.diagbot.nlp.feature.FeatureType;
+import org.diagbot.nlp.util.Constants;
+import org.diagbot.nlp.util.NegativeEnum;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.servlet.http.HttpServletRequest;
+import java.util.*;
+import java.util.regex.Pattern;
+
+/**
+ * @ClassName org.diagbot.bigdata.work.ParamsDataProxy
+ * @Description TODO
+ * @Author fyeman
+ * @Date 2019/1/16/016 14:04
+ * @Version 1.0
+ **/
+public class ParamsDataProxy {
+    Logger logger = LoggerFactory.getLogger(ParamsDataProxy.class);
+    //标准词只处理的词性
+    public static NegativeEnum[] negativeEnums = new NegativeEnum[] { NegativeEnum.VITAL_INDEX, NegativeEnum.SYMPTOM
+            , NegativeEnum.DIGITS, NegativeEnum.EVENT_TIME, NegativeEnum.UNIT, NegativeEnum.DIAG_STAND
+            , NegativeEnum.OTHER};
+    //标准词处理的三元组
+    public static NegativeEnum[][] negativeEnumTriple = {
+            { NegativeEnum.VITAL_INDEX, NegativeEnum.DIGITS, NegativeEnum.UNIT },
+            { NegativeEnum.VITAL_INDEX, NegativeEnum.DIGITS, NegativeEnum.EVENT_TIME },
+            { NegativeEnum.SYMPTOM, NegativeEnum.DIGITS, NegativeEnum.UNIT },
+            { NegativeEnum.SYMPTOM, NegativeEnum.DIGITS, NegativeEnum.EVENT_TIME },
+            { NegativeEnum.DIAG_STAND, NegativeEnum.DIGITS, NegativeEnum.UNIT },
+            { NegativeEnum.DIAG_STAND, NegativeEnum.DIGITS, NegativeEnum.EVENT_TIME },
+            { NegativeEnum.DIAG_STAND, NegativeEnum.DIGITS, NegativeEnum.OTHER }
+    };
+    //标准词处理的二元组
+    public static NegativeEnum[][] negativeEnumTwoTuple = {
+            { NegativeEnum.VITAL_INDEX, NegativeEnum.DIGITS },
+            { NegativeEnum.SYMPTOM, NegativeEnum.DIGITS }
+    };
+
+    public void createSearchData(SearchData searchData) throws Exception {
+        //消除空格
+        if (searchData.getSymptom() != null) {
+            searchData.setSymptom(searchData.getSymptom().trim());
+        }
+        if (searchData.getDiag() != null) {
+            searchData.setDiag(searchData.getDiag().trim());
+        }
+        //计算年龄区间
+        if (searchData.getAge() > 0) {
+            searchData.setAge_start(searchData.getAge() - 5);
+            searchData.setAge_end(searchData.getAge() + 5);
+        }
+        //修改性别代码
+        if (!StringUtils.isEmpty(searchData.getSex())) {
+            if ("M".equals(searchData.getSex())) {
+                searchData.setSex("1");
+            } else if ("F".equals(searchData.getSex())) {
+                searchData.setSex("2");
+            } else {
+                searchData.setSex("3");
+            }
+        } else {
+            searchData.setSex("3");
+        }
+        //默认查询门诊数据
+        if (StringUtils.isEmpty(searchData.getResourceType())) {
+            searchData.setResourceType(PushConstants.resource_type_o);
+        }
+        if (StringUtils.isNotEmpty(searchData.getSymptom())) {
+            searchData.setSymptom(searchData.getSymptom().trim());
+        }
+        //一次推送多个类别信息
+        String[] featureTypes = searchData.getFeatureType().split(",");
+        //featureType统一转换
+        String[] convertFeatureTypes = new String[featureTypes.length];
+        for (int i = 0; i < featureTypes.length; i++) {
+            convertFeatureTypes[i] = convertFeatureType(searchData.getSysCode(), featureTypes[i]);
+        }
+        searchData.setFeatureType(StringUtils.join(convertFeatureTypes, ","));
+        searchData.setFeatureTypes(convertFeatureTypes);
+
+        //获取入参中的特征信息
+        FeatureAnalyze fa = new FeatureAnalyze();
+        List<Map<String, Object>> featuresList = new ArrayList<>();
+        if (!StringUtils.isEmpty(searchData.getSymptom())) {
+            //提取现病史
+            featuresList = fa.start(searchData.getSymptom(), FeatureType.FEATURE);
+            paramFeatureInit(searchData, featuresList);
+            //提取时间信息
+            featuresList = fa.start(searchData.getSymptom(), FeatureType.TIME);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getVital())) {
+            //提取体征
+            featuresList = fa.start(searchData.getVital(), FeatureType.FEATURE);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getPast())) {
+            //提取既往史
+            featuresList = fa.start(searchData.getPast(), FeatureType.FEATURE);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getOther()) || !StringUtils.isEmpty(searchData.getIndications())) {
+            //提取其他史等
+            featuresList = fa.start((searchData.getOther() == null ? "" : searchData.getOther()) + (searchData.getIndications() == null ? "" : searchData.getIndications()), FeatureType.FEATURE);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getPacs())) {
+            featuresList = fa.start(searchData.getPacs(), FeatureType.PACS);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getLis())) {
+            featuresList = fa.start(searchData.getLis(), FeatureType.LIS);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getOther())) {
+            //如果既往史中诊断信息,需要提取这个特征
+            featuresList = fa.start(searchData.getOther(), FeatureType.DIAG);
+            paramFeatureInit(searchData, featuresList);
+        }
+        if (!StringUtils.isEmpty(searchData.getDiag()) && StringUtils.isEmpty(searchData.getSymptom())) {
+            featuresList = fa.start(searchData.getDiag(), FeatureType.DIAG);
+            paramFeatureInit(searchData, featuresList);
+        }
+    }
+
+    /**
+     * 外部系统featureType需要转化为大数据定义的featureType
+     *
+     * @param sysCode
+     * @param featureType
+     * @return
+     */
+    private String convertFeatureType(String sysCode, String featureType) {
+        if (StringUtils.isEmpty(sysCode) || sysCode.equals("1")) {
+            if ("1".equals(featureType)) {
+                return PushConstants.feature_type_symptom;
+            }
+            if ("7".equals(featureType)) {
+                return PushConstants.feature_type_diag;
+            }
+            if ("4".equals(featureType)) {
+                return PushConstants.feature_type_vital;
+            }
+            if ("5".equals(featureType)) {
+                return PushConstants.feature_type_lis;
+            }
+            if ("6".equals(featureType)) {
+                return PushConstants.feature_type_pacs;
+            }
+            if ("3".equals(featureType)) {
+                return PushConstants.feature_type_history;
+            }
+            if ("8".equals(featureType)) {
+                return PushConstants.feature_type_treat;
+            }
+            if ("22".equals(featureType)) {
+                return PushConstants.feature_type_labelpush;
+            }
+            if ("11".equals(featureType)) {
+                return PushConstants.feature_type_manju;
+            }
+            if ("42".equals(featureType)) {
+                return PushConstants.feature_type_vital_index;
+            }
+            return null;
+        }
+        return featureType;
+    }
+
+    private SearchData cleanFeature(List<Map<String, Object>> featuresList, FeatureAnalyze fa,
+                                    SearchData searchData) {
+        // 在输入的辅检文本中,只提取辅检信息
+        String[] PACS_Feature = { Constants.word_property_PACS,
+                Constants.word_property_PACS_Detail, Constants.word_property_PACS_Result };
+        searchData = removeFeature(searchData.getLis(), fa, searchData, PACS_Feature, FeatureType.PACS);
+
+        // 在输入的化验文本中,只提取化验信息
+        String[] LIS_Feature = { Constants.word_property_LIS,
+                Constants.word_property_LIS_Detail, Constants.word_property_LIS_Result };
+        searchData = removeFeature(searchData.getPacs(), fa, searchData, LIS_Feature, FeatureType.LIS);
+
+        return searchData;
+    }
+
+    private SearchData removeFeature(String text, FeatureAnalyze fa,
+                                     SearchData searchData, String[] properties, FeatureType featureType) {
+        String name = "";
+        Boolean related = false;
+
+        try {
+            List<Map<String, Object>> featureList = fa.start(text, featureType);
+            if (featureList != null) {
+                for (Map<String, Object> item : featureList) {
+                    name = item.get("feature_name").toString();
+                    String[] property = item.get("property").toString().split(",");
+                    for (String prop : property) {
+                        if (Arrays.asList(properties).contains(prop)) {
+                            //                            related = true;
+                            searchData.getInputs().remove(name);
+                            break;
+                        }
+                    }
+                }
+            }
+
+        } catch (Exception ex) {
+            ex.printStackTrace();
+        } finally {
+            return searchData;
+        }
+    }
+
+    /**
+     * 推送模型入参
+     *
+     * @param searchData
+     * @throws Exception
+     */
+    private void paramFeatureInit(SearchData searchData, List<Map<String, Object>> featuresList) throws Exception {
+        if (featuresList != null && featuresList.size() > 0) {
+            Map<String, Object> featureMap = null;
+            for (int i = 0; i < featuresList.size(); i++) {
+                featureMap = featuresList.get(i);
+                Map<String, String> map = new HashMap<>();
+                for (Map.Entry<String, Object> entry : featureMap.entrySet()) {
+                    map.put(entry.getKey(), String.valueOf(entry.getValue()));
+                }
+                map.put("featureType", String.valueOf(featureMap.get("feature_type")));
+                map.put("featureName", String.valueOf(featureMap.get("feature_name")));
+                map.put("property", String.valueOf(featureMap.get("property")));
+                map.put("concept", String.valueOf(featureMap.get("concept")));
+                if (Constants.default_negative.equals(featureMap.get("negative"))) {
+                    if (map.get("featureType").equals(Constants.feature_type_time)) {
+                        searchData.getInputs().put("时间", map);
+                    } else {
+                        if (searchData.getInputs().get(map.get("feature_name")) == null) {
+                            if (i < 5) {
+                                searchData.getInputs().put(map.get("feature_name"), map);
+                            }
+                            searchData.getGraphInputs().put(map.get("feature_name"), map);
+                        }
+                    }
+                } else {
+                    searchData.getFilters().put(map.get("feature_name"), map);
+                }
+            }
+        }
+    }
+}