
Merge branch 'dev/mix20191104_gofastdfs' into dev/diagbotcloud20191024_pacs

# Conflicts:
#	config-server/src/main/resources/shared/knowledgeman-service-pro.yml
gaodm committed 5 years ago
Commit 283eb63463
100 files changed, with 2871 insertions and 1491 deletions
  1. + 3 - 23    admin-service/src/main/java/com/diagbot/AdminServiceApplication.java
  2. + 31 - 5    admin-service/src/main/resources/logback-spring.xml
  3. + 31 - 5    aipt-service/src/main/resources/logback-spring.xml
  4. + 31 - 5    bi-service/src/main/resources/logback-spring.xml
  5. + 31 - 2    config-server/src/main/resources/logback-spring.xml
  6. + 38 - 0    config-server/src/main/resources/shared/admin-service-pre.yml
  7. + 1 - 1     config-server/src/main/resources/shared/admin-service-pro.yml
  8. + 1 - 1     config-server/src/main/resources/shared/admin-service-test.yml
  9. + 106 - 0   config-server/src/main/resources/shared/aipt-service-pre.yml
  10. + 4 - 4    config-server/src/main/resources/shared/aipt-service-pro.yml
  11. + 72 - 0   config-server/src/main/resources/shared/application-pre.yml
  12. + 1 - 1    config-server/src/main/resources/shared/application-pro.yml
  13. + 102 - 0  config-server/src/main/resources/shared/bi-service-pre.yml
  14. + 3 - 3    config-server/src/main/resources/shared/bi-service-pro.yml
  15. + 21 - 0   config-server/src/main/resources/shared/data-service-pre.yml
  16. + 1 - 1    config-server/src/main/resources/shared/data-service-pro.yml
  17. + 97 - 0   config-server/src/main/resources/shared/diagbotman-service-pre.yml
  18. + 2 - 2    config-server/src/main/resources/shared/diagbotman-service-pro.yml
  19. + 96 - 0   config-server/src/main/resources/shared/feedback-service-pre.yml
  20. + 2 - 2    config-server/src/main/resources/shared/feedback-service-pro.yml
  21. + 130 - 0  config-server/src/main/resources/shared/gateway-service-pre.yml
  22. + 22 - 22  config-server/src/main/resources/shared/gateway-service-pro.yml
  23. + 119 - 0  config-server/src/main/resources/shared/icss-service-pre.yml
  24. + 3 - 3    config-server/src/main/resources/shared/icss-service-pro.yml
  25. + 126 - 0  config-server/src/main/resources/shared/icssman-service-pre.yml
  26. + 4 - 4    config-server/src/main/resources/shared/icssman-service-pro.yml
  27. + 2 - 7    config-server/src/main/resources/shared/knowledgeman-service-dev.yml
  28. + 2 - 7    config-server/src/main/resources/shared/knowledgeman-service-local.yml
  29. + 134 - 0  config-server/src/main/resources/shared/knowledgeman-service-pre.yml
  30. + 7 - 16   config-server/src/main/resources/shared/knowledgeman-service-pro.yml
  31. + 2 - 7    config-server/src/main/resources/shared/knowledgeman-service-test.yml
  32. + 100 - 0  config-server/src/main/resources/shared/logger-service-pre.yml
  33. + 2 - 2    config-server/src/main/resources/shared/logger-service-pro.yml
  34. + 20 - 0   config-server/src/main/resources/shared/ltapi-service-pre.yml
  35. + 1 - 1    config-server/src/main/resources/shared/ltapi-service-pro.yml
  36. + 25 - 0   config-server/src/main/resources/shared/monitor-service-pre.yml
  37. + 1 - 1    config-server/src/main/resources/shared/monitor-service-pro.yml
  38. + 5 - 9    config-server/src/main/resources/shared/prec-service-dev.yml
  39. + 5 - 9    config-server/src/main/resources/shared/prec-service-local.yml
  40. + 103 - 0  config-server/src/main/resources/shared/prec-service-pre.yml
  41. + 7 - 11   config-server/src/main/resources/shared/prec-service-pro.yml
  42. + 5 - 10   config-server/src/main/resources/shared/prec-service-test.yml
  43. + 2 - 7    config-server/src/main/resources/shared/precman-service-dev.yml
  44. + 2 - 7    config-server/src/main/resources/shared/precman-service-local.yml
  45. + 105 - 0  config-server/src/main/resources/shared/precman-service-pre.yml
  46. + 4 - 9    config-server/src/main/resources/shared/precman-service-pro.yml
  47. + 2 - 7    config-server/src/main/resources/shared/precman-service-test.yml
  48. + 112 - 0  config-server/src/main/resources/shared/tran-service-pre.yml
  49. + 3 - 3    config-server/src/main/resources/shared/tran-service-pro.yml
  50. + 20 - 0   config-server/src/main/resources/shared/triage-service-pre.yml
  51. + 1 - 1    config-server/src/main/resources/shared/triage-service-pro.yml
  52. + 89 - 0   config-server/src/main/resources/shared/uaa-service-pre.yml
  53. + 2 - 2    config-server/src/main/resources/shared/uaa-service-pro.yml
  54. + 117 - 0  config-server/src/main/resources/shared/user-service-pre.yml
  55. + 3 - 3    config-server/src/main/resources/shared/user-service-pro.yml
  56. + 31 - 5   data-service/src/main/resources/logback-spring.xml
  57. + 31 - 5   diagbotman-service/src/main/resources/logback-spring.xml
  58. + 8 - 0    eureka-server/src/main/resources/application-pre.yml
  59. + 31 - 5   eureka-server/src/main/resources/logback-spring.xml
  60. + 31 - 5   feedback-service/src/main/resources/logback-spring.xml
  61. + 100 - 57 gateway-service/src/main/resources/logback-spring.xml
  62. + 32 - 6   icss-service/src/main/resources/logback-spring.xml
  63. + 31 - 5   icssman-service/src/main/resources/logback-spring.xml
  64. + 5 - 5    knowledgeman-service/pom.xml
  65. + 0 - 161  knowledgeman-service/src/main/java/com/diagbot/client/fastdfs/FastDFSClient.java
  66. + 0 - 70   knowledgeman-service/src/main/java/com/diagbot/client/fastdfs/FastDFSFile.java
  67. + 0 - 49   knowledgeman-service/src/main/java/com/diagbot/config/FastDFSConfigurer.java
  68. + 1 - 0    knowledgeman-service/src/main/java/com/diagbot/dto/FileDTO.java
  69. + 17 - 0   knowledgeman-service/src/main/java/com/diagbot/dto/FileDeleteDTO.java
  70. + 25 - 0   knowledgeman-service/src/main/java/com/diagbot/dto/FileUploadDTO.java
  71. + 3 - 1    knowledgeman-service/src/main/java/com/diagbot/facade/DiagnoseFacade.java
  72. + 80 - 49  knowledgeman-service/src/main/java/com/diagbot/service/impl/UploadServiceImpl.java
  73. + 2 - 8    knowledgeman-service/src/main/java/com/diagbot/web/UploadController.java
  74. + 31 - 5   knowledgeman-service/src/main/resources/logback-spring.xml
  75. + 31 - 5   log-service/src/main/resources/logback-spring.xml
  76. + 31 - 5   ltapi-service/src/main/resources/logback-spring.xml
  77. + 31 - 5   monitor-service/src/main/resources/logback-spring.xml
  78. + 7 - 0    pom.xml
  79. + 9 - 5    prec-service/pom.xml
  80. + 103 - 0  prec-service/src/main/java/com/diagbot/aggregate/UploadAggregate.java
  81. + 0 - 161  prec-service/src/main/java/com/diagbot/client/fastdfs/FastDFSClient.java
  82. + 0 - 70   prec-service/src/main/java/com/diagbot/client/fastdfs/FastDFSFile.java
  83. + 0 - 49   prec-service/src/main/java/com/diagbot/config/FastDFSConfigurer.java
  84. + 3 - 9    prec-service/src/main/java/com/diagbot/dto/FileDTO.java
  85. + 17 - 0   prec-service/src/main/java/com/diagbot/dto/FileDeleteDTO.java
  86. + 25 - 0   prec-service/src/main/java/com/diagbot/dto/FileUploadDTO.java
  87. + 1 - 7    prec-service/src/main/java/com/diagbot/dto/GetInquiryDetailImgDTO.java
  88. + 1 - 4    prec-service/src/main/java/com/diagbot/facade/InquiryInfoFacade.java
  89. + 5 - 8    prec-service/src/main/java/com/diagbot/service/UploadService.java
  90. + 121 - 185 prec-service/src/main/java/com/diagbot/service/impl/UploadServiceImpl.java
  91. + 0 - 7    prec-service/src/main/java/com/diagbot/vo/SaveInquiryReportVO.java
  92. + 12 - 42  prec-service/src/main/java/com/diagbot/web/UploadController.java
  93. + 31 - 5   prec-service/src/main/resources/logback-spring.xml
  94. + 5 - 5    precman-service/pom.xml
  95. + 0 - 161  precman-service/src/main/java/com/diagbot/client/fastdfs/FastDFSClient.java
  96. + 0 - 70   precman-service/src/main/java/com/diagbot/client/fastdfs/FastDFSFile.java
  97. + 0 - 49   precman-service/src/main/java/com/diagbot/config/FastDFSConfigurer.java
  98. + 1 - 0    precman-service/src/main/java/com/diagbot/dto/FileDTO.java
  99. + 17 - 0   precman-service/src/main/java/com/diagbot/dto/FileDeleteDTO.java
  100. + 0 - 0   precman-service/src/main/java/com/diagbot/dto/FileUploadDTO.java

+ 3 - 23
admin-service/src/main/java/com/diagbot/AdminServiceApplication.java

@@ -33,9 +33,9 @@ public class AdminServiceApplication {
         SpringApplication.run(AdminServiceApplication.class, args);
     }
 
-    @Profile("local")
+    @Profile({ "local", "dev", "test" })
     @Configuration
-    public static class SecurityPermitAllConfigLocal extends WebSecurityConfigurerAdapter {
+    public static class SecurityPermitAllConfig extends WebSecurityConfigurerAdapter {
         @Override
         protected void configure(HttpSecurity http) throws Exception {
             http.authorizeRequests().anyRequest().permitAll()
@@ -43,27 +43,7 @@ public class AdminServiceApplication {
         }
     }
 
-    @Profile("dev")
-    @Configuration
-    public static class SecurityPermitAllConfigDev extends WebSecurityConfigurerAdapter {
-        @Override
-        protected void configure(HttpSecurity http) throws Exception {
-            http.authorizeRequests().anyRequest().permitAll()
-                    .and().csrf().disable();
-        }
-    }
-
-    @Profile("test")
-    @Configuration
-    public static class SecurityPermitAllConfigTest extends WebSecurityConfigurerAdapter {
-        @Override
-        protected void configure(HttpSecurity http) throws Exception {
-            http.authorizeRequests().anyRequest().permitAll()
-                    .and().csrf().disable();
-        }
-    }
-
-    @Profile("pro")
+    @Profile({ "pre", "pro" })
     @Configuration
     public static class SecuritySecureConfig extends WebSecurityConfigurerAdapter {
         private final String adminContextPath;
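
Taken together, the two hunks above collapse what used to be four per-profile security classes into two: a shared permit-all configuration for the non-production profiles and a single secured configuration for pre/pro. Reconstructed from the + lines (the body of SecuritySecureConfig is unchanged and only partially visible in this hunk), the relevant inner classes of AdminServiceApplication now read roughly as follows:

    @Profile({ "local", "dev", "test" })
    @Configuration
    public static class SecurityPermitAllConfig extends WebSecurityConfigurerAdapter {
        @Override
        protected void configure(HttpSecurity http) throws Exception {
            // Non-production profiles: every request is permitted and CSRF is disabled.
            http.authorizeRequests().anyRequest().permitAll()
                    .and().csrf().disable();
        }
    }

    @Profile({ "pre", "pro" })
    @Configuration
    public static class SecuritySecureConfig extends WebSecurityConfigurerAdapter {
        // Pre-release and production keep the existing secured setup based on
        // adminContextPath; its body lies outside this hunk and is unchanged.
        private final String adminContextPath;
        ...
    }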

+ 31 - 5
admin-service/src/main/resources/logback-spring.xml

@@ -15,7 +15,7 @@
                     converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
     <!-- 彩色日志格式 -->
     <!--<property name="CONSOLE_LOG_PATTERN"-->
-              <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
+    <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
     <!--包名输出缩进对齐-->
     <property name="CONSOLE_LOG_PATTERN"
               value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
@@ -229,6 +229,22 @@
         </encoder>
     </appender>
 
+    <appender name="LOGSTASHPRE" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.121:5044</destination>
+        <!-- encoder必须配置,有多种可选 -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"admin-service"}</customFields>
+        </encoder>
+    </appender>
+
+    <appender name="LOGSTASHPRO" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.122:5044</destination>
+        <!-- encoder必须配置,有多种可选 -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"admin-service"}</customFields>
+        </encoder>
+    </appender>
+
     <!-- 本地环境下的日志配置 -->
     <springProfile name="local">
         <root level="INFO">
@@ -236,7 +252,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
         </root>
     </springProfile>
@@ -248,7 +263,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHDEV"/>
         </root>
@@ -261,12 +275,23 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHTEST"/>
         </root>
     </springProfile>
 
+    <!-- 预发布环境下的日志配置 -->
+    <springProfile name="pre">
+        <root level="INFO">
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRE"/>
+        </root>
+    </springProfile>
+
     <!-- 生产环境下的日志配置 -->
     <springProfile name="pro">
         <root level="INFO">
@@ -274,7 +299,8 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <appender-ref ref="LOGSTASH"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRO"/>
         </root>
     </springProfile>
 </configuration>
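
The same edit is repeated for every service's logback-spring.xml below: two new LogstashTcpSocketAppender definitions (LOGSTASHPRE pointing at 192.168.2.121:5044 and LOGSTASHPRO at 192.168.2.122:5044), a new pre profile block, and the pro profile switched from the old LOGSTASH appender to STDOUT plus LOGSTASHPRO. A minimal sketch of the mechanism, assuming standard Spring Boot behaviour where appender definitions are always parsed but only the <springProfile> block matching spring.profiles.active is applied:

    <configuration>
        <!-- Defined unconditionally; used only if a matching profile block references it. -->
        <appender name="LOGSTASHPRE" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
            <destination>192.168.2.121:5044</destination>
            <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
                <customFields>{"appname":"admin-service"}</customFields>
            </encoder>
        </appender>

        <!-- Wired in only when the application runs with spring.profiles.active=pre. -->
        <springProfile name="pre">
            <root level="INFO">
                <appender-ref ref="LOGSTASHPRE"/>
            </root>
        </springProfile>
    </configuration>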

+ 31 - 5
aipt-service/src/main/resources/logback-spring.xml

@@ -15,7 +15,7 @@
                     converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
     <!-- 彩色日志格式 -->
     <!--<property name="CONSOLE_LOG_PATTERN"-->
-              <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
+    <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
     <!--包名输出缩进对齐-->
     <property name="CONSOLE_LOG_PATTERN"
               value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
@@ -229,6 +229,22 @@
         </encoder>
     </appender>
 
+    <appender name="LOGSTASHPRE" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.121:5044</destination>
+        <!-- encoder必须配置,有多种可选 -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"aipt-service"}</customFields>
+        </encoder>
+    </appender>
+
+    <appender name="LOGSTASHPRO" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.122:5044</destination>
+        <!-- encoder必须配置,有多种可选 -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"aipt-service"}</customFields>
+        </encoder>
+    </appender>
+
     <!-- 本地环境下的日志配置 -->
     <springProfile name="local">
         <root level="INFO">
@@ -236,7 +252,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
         </root>
     </springProfile>
@@ -248,7 +263,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHDEV"/>
         </root>
@@ -261,12 +275,23 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHTEST"/>
         </root>
     </springProfile>
 
+    <!-- 预发布环境下的日志配置 -->
+    <springProfile name="pre">
+        <root level="INFO">
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRE"/>
+        </root>
+    </springProfile>
+
     <!-- 生产环境下的日志配置 -->
     <springProfile name="pro">
         <root level="INFO">
@@ -274,7 +299,8 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <appender-ref ref="LOGSTASH"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRO"/>
         </root>
     </springProfile>
 </configuration>

+ 31 - 5
bi-service/src/main/resources/logback-spring.xml

@@ -15,7 +15,7 @@
                     converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
     <!-- 彩色日志格式 -->
     <!--<property name="CONSOLE_LOG_PATTERN"-->
-              <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
+    <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
     <!--包名输出缩进对齐-->
     <property name="CONSOLE_LOG_PATTERN"
               value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
@@ -229,6 +229,22 @@
         </encoder>
     </appender>
 
+    <appender name="LOGSTASHPRE" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.121:5044</destination>
+        <!-- encoder必须配置,有多种可选 -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"bi-service"}</customFields>
+        </encoder>
+    </appender>
+
+    <appender name="LOGSTASHPRO" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.122:5044</destination>
+        <!-- encoder必须配置,有多种可选 -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"bi-service"}</customFields>
+        </encoder>
+    </appender>
+
     <!-- 本地环境下的日志配置 -->
     <springProfile name="local">
         <root level="INFO">
@@ -236,7 +252,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
         </root>
     </springProfile>
@@ -248,7 +263,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHDEV"/>
         </root>
@@ -261,12 +275,23 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHTEST"/>
         </root>
     </springProfile>
 
+    <!-- 预发布环境下的日志配置 -->
+    <springProfile name="pre">
+        <root level="INFO">
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRE"/>
+        </root>
+    </springProfile>
+
     <!-- 生产环境下的日志配置 -->
     <springProfile name="pro">
         <root level="INFO">
@@ -274,7 +299,8 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <appender-ref ref="LOGSTASH"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRO"/>
         </root>
     </springProfile>
 </configuration>

+ 31 - 2
config-server/src/main/resources/logback-spring.xml

@@ -15,7 +15,7 @@
                     converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
     <!-- 彩色日志格式 -->
     <!--<property name="CONSOLE_LOG_PATTERN"-->
-              <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
+    <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
     <!--包名输出缩进对齐-->
     <property name="CONSOLE_LOG_PATTERN"
               value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
@@ -229,6 +229,22 @@
         </encoder>
     </appender>
 
+    <appender name="LOGSTASHPRE" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.121:5044</destination>
+        <!-- encoder必须配置,有多种可选 -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"config-server"}</customFields>
+        </encoder>
+    </appender>
+
+    <appender name="LOGSTASHPRO" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.122:5044</destination>
+        <!-- encoder必须配置,有多种可选 -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"config-server"}</customFields>
+        </encoder>
+    </appender>
+
     <!-- 本地环境下的日志配置 -->
     <springProfile name="native">
         <root level="INFO">
@@ -277,6 +293,18 @@
         </root>
     </springProfile>
 
+    <!-- 预发布环境下的日志配置 -->
+    <springProfile name="pre">
+        <root level="INFO">
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRE"/>
+        </root>
+    </springProfile>
+
     <!-- 生产环境下的日志配置 -->
     <springProfile name="pro">
         <root level="INFO">
@@ -284,7 +312,8 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <appender-ref ref="LOGSTASH"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRO"/>
         </root>
     </springProfile>
 </configuration>

+ 38 - 0
config-server/src/main/resources/shared/admin-service-pre.yml

@@ -0,0 +1,38 @@
+server:
+  port: 9998
+
+#spring:
+#  boot:
+#    admin:
+#      routes:
+#        endpoints: env,metrics,dump,jolokia,info,configprops,trace,logfile,refresh,flyway,liquibase,heapdump,loggers,auditevents,hystrix.stream,activiti
+#      turbine:
+#        clusters: default
+#        location: monitor-service
+
+security2:
+  user:
+    name: "admin"
+    password: "123456"
+
+
+spring:
+  profiles: pre
+  security:
+    user:
+      name: ${security2.user.name}
+      password: ${security2.user.password}
+  #mq
+  rabbitmq:
+    host: 192.168.2.121
+    port: 5672
+    username: lantone
+    password: lantone
+    publisher-confirms: true
+    virtual-host: /
+
+eureka:
+  instance:
+    metadata-map:
+      user.name: ${security2.user.name}
+      user.password: ${security2.user.password}

+ 1 - 1
config-server/src/main/resources/shared/admin-service-pro.yml

@@ -24,7 +24,7 @@ spring:
       password: ${security2.user.password}
   #mq
   rabbitmq:
-    host: 192.168.2.236
+    host: 192.168.2.122
     port: 5672
     username: lantone
     password: lantone

+ 1 - 1
config-server/src/main/resources/shared/admin-service-test.yml

@@ -17,7 +17,7 @@ security2:
 
 
 spring:
-  profiles: dev
+  profiles: test
   #  security:
   #    user:
   #      name: ${security2.user.name}

+ 106 - 0
config-server/src/main/resources/shared/aipt-service-pre.yml

@@ -0,0 +1,106 @@
+server:
+  port: 8845
+
+# 驱动配置信息
+spring:
+  datasource:
+    druid:
+      driver-class-name: com.mysql.cj.jdbc.Driver
+      platform: mysql
+      url: jdbc:mysql://192.168.2.121:3306/med?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      username: teamback
+      password: goTulmLeon
+      # 连接池的配置信息
+      # 初始化大小,最小,最大
+      initialSize: 5
+      minIdle: 5
+      maxActive: 20
+      # 配置获取连接等待超时的时间
+      maxWait: 60000
+      # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
+      timeBetweenEvictionRunsMillis: 60000
+      # 配置一个连接在池中最小生存的时间,单位是毫秒
+      minEvictableIdleTimeMillis: 300000
+      validationQuery: SELECT 1 FROM DUAL
+      testWhileIdle: true
+      testOnBorrow: false
+      testOnReturn: false
+      # 打开PSCache,并且指定每个连接上PSCache的大小
+      poolPreparedStatements: true
+      maxPoolPreparedStatementPerConnectionSize: 20
+      # 配置监控统计拦截的filters,去掉后监控界面sql无法统计,'wall'用于防火墙
+      filters.commons-log.connection-logger-name: wall,log4j
+      filter:
+        stat:
+          enabled: true
+          mergeSql: true
+          log-slow-sql: true
+          slow-sql-millis: 2000
+      #监控配置
+      web-stat-filter:
+        enabled: true
+        url-pattern: /*
+        exclusions: '*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*'
+
+      # StatViewServlet配置,说明请参考Druid Wiki,配置_StatViewServlet配置
+      stat-view-servlet:
+        enabled: true
+        url-pattern: /druid/*
+        reset-enable: false
+        login-username: root
+        login-password: root
+
+  cloud:
+    stream:
+      bindings:
+        outputLog:
+          destination: myLog
+  #          contentType: text/plain      # 实体 json string 在传递的类型装换 查看 http://docs.spring
+
+  #mq
+  rabbitmq:
+    host: 192.168.2.121
+    port: 5672
+    username: lantone
+    password: lantone
+    publisher-confirms: true
+    virtual-host: /
+
+#mybatis
+mybatis-plus:
+  mapper-locations: classpath:/mapper/*Mapper.xml
+  #实体扫描,多个package用逗号或者分号分隔
+  typeAliasesPackage: com.diagbot.entity
+  global-config:
+    #刷新mapper 调试神器
+    db-config:
+      #主键类型  0:"数据库ID自增", 1:"用户输入ID",2:"全局唯一ID (数字类型唯一ID)", 3:"全局唯一ID UUID";
+      id-type: id_worker
+      #字段策略 0:"忽略判断",1:"非 NULL 判断"),2:"非空判断"
+      field-strategy: not_empty
+      #驼峰下划线转换
+      column-underline: true
+      #数据库大写下划线转换
+      #capital-mode: true
+      #刷新mapper 调试神器
+      refresh-mapper: true
+      #逻辑删除配置
+      logic-delete-value: 0
+      logic-not-delete-value: 1
+      #自定义填充策略接口实现
+      #meta-object-handler: com.baomidou.springboot.xxx
+      #自定义SQL注入器
+      #sql-injector: com.baomidou.springboot.xxx
+  configuration:
+    map-underscore-to-camel-case: true
+    cache-enabled: false
+
+io.github.lvyahui8.spring.base-packages: com.diagbot.aggregate
+
+ai:
+  server:
+    address: http://192.168.2.186:5008
+
+nlp:
+  server:
+    address: http://192.168.2.186:5002

+ 4 - 4
config-server/src/main/resources/shared/aipt-service-pro.yml

@@ -7,7 +7,7 @@ spring:
     druid:
       driver-class-name: com.mysql.cj.jdbc.Driver
       platform: mysql
-      url: jdbc:mysql://192.168.2.236:3306/sys-log?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      url: jdbc:mysql://192.168.2.122:3306/med?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
       username: root
       password: lantone
       # 连接池的配置信息
@@ -59,7 +59,7 @@ spring:
 
   #mq
   rabbitmq:
-    host: 192.168.2.236
+    host: 192.168.2.122
     port: 5672
     username: lantone
     password: lantone
@@ -99,8 +99,8 @@ io.github.lvyahui8.spring.base-packages: com.diagbot.aggregate
 
 ai:
   server:
-    address: http://192.168.2.234:5008
+    address: http://192.168.2.123:5008
 
 nlp:
   server:
-    address: http://192.168.2.234:5002
+    address: http://192.168.2.123:5002

+ 72 - 0
config-server/src/main/resources/shared/application-pre.yml

@@ -0,0 +1,72 @@
+#logging:
+#  level:
+#    org.springframework.security: INFO
+
+hystrix:
+  command:
+    default:
+      execution:
+        isolation:
+          thread:
+            timeoutInMilliseconds: 20000
+
+ribbon:
+  ReadTimeout: 20000
+  ConnectTimeout: 20000
+  MaxAutoRetries: 0
+  MaxAutoRetriesNextServer: 1
+
+eureka:
+  instance:
+    prefer-ip-address: true #使用IP注册
+    instance-id: ${spring.cloud.client.ip-address}:${server.port}
+    leaseRenewalIntervalInSeconds: 10
+    health-check-url-path: /actuator/health #2.0后actuator的地址发生了变化
+  client:
+    registryFetchIntervalSeconds: 5
+#    serviceUrl:
+#      defaultZone: http://eureka1:8761/eureka/
+
+#endpoints:
+#  health:
+#    sensitive: false
+#    enabled: true
+#  actuator:
+#    enabled: true
+#    sensitive: false
+#  beans:
+#    sensitive: false
+#    enabled: true
+
+
+management:
+  endpoints:
+    web:
+      exposure:
+        include: bus-refresh,health,info,hystrix.stream
+      cors:
+        allowed-origins: "*"
+        allowed-methods: "*"
+  endpoint:
+    health:
+      show-details: always
+feign:
+  hystrix:
+    enabled: true
+
+spring:
+  #消息总线
+  cloud:
+    bus:
+      enabled: true
+      trace:
+        enabled: true
+  jackson:
+    date-format: yyyy-MM-dd HH:mm:ss
+    time-zone: GMT+8
+
+server:
+  max-http-header-size: 10MB
+
+swagger:
+  enable: true

+ 1 - 1
config-server/src/main/resources/shared/application-pro.yml

@@ -69,4 +69,4 @@ server:
   max-http-header-size: 10MB
 
 swagger:
-  enable: false
+  enable: true

+ 102 - 0
config-server/src/main/resources/shared/bi-service-pre.yml

@@ -0,0 +1,102 @@
+server:
+  port: 8841
+
+# 驱动配置信息
+spring:
+  datasource:
+    druid:
+      driver-class-name: com.mysql.cj.jdbc.Driver
+      platform: mysql
+      url: jdbc:mysql://192.168.2.121:3306/sys-log?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      username: teamback
+      password: goTulmLeon
+      # 连接池的配置信息
+      # 初始化大小,最小,最大
+      initialSize: 5
+      minIdle: 5
+      maxActive: 20
+      # 配置获取连接等待超时的时间
+      maxWait: 60000
+      # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
+      timeBetweenEvictionRunsMillis: 60000
+      # 配置一个连接在池中最小生存的时间,单位是毫秒
+      minEvictableIdleTimeMillis: 300000
+      validationQuery: SELECT 1 FROM DUAL
+      testWhileIdle: true
+      testOnBorrow: false
+      testOnReturn: false
+      # 打开PSCache,并且指定每个连接上PSCache的大小
+      poolPreparedStatements: true
+      maxPoolPreparedStatementPerConnectionSize: 20
+      # 配置监控统计拦截的filters,去掉后监控界面sql无法统计,'wall'用于防火墙
+      filters.commons-log.connection-logger-name: wall,log4j
+      filter:
+        stat:
+          enabled: true
+          mergeSql: true
+          log-slow-sql: true
+          slow-sql-millis: 2000
+      #监控配置
+      web-stat-filter:
+        enabled: true
+        url-pattern: /*
+        exclusions: '*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*'
+
+      # StatViewServlet配置,说明请参考Druid Wiki,配置_StatViewServlet配置
+      stat-view-servlet:
+        enabled: true
+        url-pattern: /druid/*
+        reset-enable: false
+        login-username: root
+        login-password: root
+
+  cloud:
+    stream:
+      bindings:
+        outputLog:
+          destination: myLog
+  #          contentType: text/plain      # 实体 json string 在传递的类型装换 查看 http://docs.spring
+
+  #mq
+  rabbitmq:
+    host: 192.168.2.121
+    port: 5672
+    username: lantone
+    password: lantone
+    publisher-confirms: true
+    virtual-host: /
+
+#mybatis
+mybatis-plus:
+  mapper-locations: classpath:/mapper/*Mapper.xml
+  #实体扫描,多个package用逗号或者分号分隔
+  typeAliasesPackage: com.diagbot.entity
+  global-config:
+    #刷新mapper 调试神器
+    db-config:
+      #主键类型  0:"数据库ID自增", 1:"用户输入ID",2:"全局唯一ID (数字类型唯一ID)", 3:"全局唯一ID UUID";
+      id-type: id_worker
+      #字段策略 0:"忽略判断",1:"非 NULL 判断"),2:"非空判断"
+      field-strategy: not_empty
+      #驼峰下划线转换
+      column-underline: true
+      #数据库大写下划线转换
+      #capital-mode: true
+      #刷新mapper 调试神器
+      refresh-mapper: true
+      #逻辑删除配置
+      logic-delete-value: 0
+      logic-not-delete-value: 1
+      #自定义填充策略接口实现
+      #meta-object-handler: com.baomidou.springboot.xxx
+      #自定义SQL注入器
+      #sql-injector: com.baomidou.springboot.xxx
+  configuration:
+    map-underscore-to-camel-case: true
+    cache-enabled: false
+
+io.github.lvyahui8.spring.base-packages: com.diagbot.aggregate
+
+neo:
+  server:
+    address: http://192.168.2.186:5004

+ 3 - 3
config-server/src/main/resources/shared/bi-service-pro.yml

@@ -7,7 +7,7 @@ spring:
     druid:
       driver-class-name: com.mysql.cj.jdbc.Driver
       platform: mysql
-      url: jdbc:mysql://192.168.2.236:3306/sys-log?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      url: jdbc:mysql://192.168.2.122:3306/sys-log?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
       username: root
       password: lantone
       # 连接池的配置信息
@@ -59,7 +59,7 @@ spring:
 
   #mq
   rabbitmq:
-    host: 192.168.2.236
+    host: 192.168.2.122
     port: 5672
     username: lantone
     password: lantone
@@ -99,4 +99,4 @@ io.github.lvyahui8.spring.base-packages: com.diagbot.aggregate
 
 neo:
   server:
-    address: http://192.168.2.234:5004
+    address: http://192.168.2.123:5004

+ 21 - 0
config-server/src/main/resources/shared/data-service-pre.yml

@@ -0,0 +1,21 @@
+server:
+  port: 8823
+
+# 驱动配置信息
+spring:
+  cloud:
+    stream:
+      bindings:
+        outputLog:
+          destination: myLog
+  #          contentType: text/plain      # 实体 json string 在传递的类型装换 查看 http://docs.spring
+
+  #mq
+  rabbitmq:
+    host: 192.168.2.121
+    port: 5672
+    username: lantone
+    password: lantone
+    publisher-confirms: true
+    virtual-host: /
+

+ 1 - 1
config-server/src/main/resources/shared/data-service-pro.yml

@@ -12,7 +12,7 @@ spring:
 
   #mq
   rabbitmq:
-    host: 192.168.2.236
+    host: 192.168.2.122
     port: 5672
     username: lantone
     password: lantone

+ 97 - 0
config-server/src/main/resources/shared/diagbotman-service-pre.yml

@@ -0,0 +1,97 @@
+server:
+  port: 8811
+
+# 驱动配置信息
+spring:
+  datasource:
+    druid:
+      driver-class-name: com.mysql.cj.jdbc.Driver
+      platform: mysql
+      url: jdbc:mysql://192.168.2.121:3306/sys-diagbotman?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      username: teamback
+      password: goTulmLeon
+      # 连接池的配置信息
+      # 初始化大小,最小,最大
+      initialSize: 5
+      minIdle: 5
+      maxActive: 20
+      # 配置获取连接等待超时的时间
+      maxWait: 60000
+      # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
+      timeBetweenEvictionRunsMillis: 60000
+      # 配置一个连接在池中最小生存的时间,单位是毫秒
+      minEvictableIdleTimeMillis: 300000
+      validationQuery: SELECT 1 FROM DUAL
+      testWhileIdle: true
+      testOnBorrow: false
+      testOnReturn: false
+      # 打开PSCache,并且指定每个连接上PSCache的大小
+      poolPreparedStatements: true
+      maxPoolPreparedStatementPerConnectionSize: 20
+      # 配置监控统计拦截的filters,去掉后监控界面sql无法统计,'wall'用于防火墙
+      filters.commons-log.connection-logger-name: wall,log4j
+      filter:
+        stat:
+          enabled: true
+          mergeSql: true
+          log-slow-sql: true
+          slow-sql-millis: 2000
+      #监控配置
+      web-stat-filter:
+        enabled: true
+        url-pattern: /*
+        exclusions: '*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*'
+
+      # StatViewServlet配置,说明请参考Druid Wiki,配置_StatViewServlet配置
+      stat-view-servlet:
+        enabled: true
+        url-pattern: /druid/*
+        reset-enable: false
+        login-username: root
+        login-password: root
+
+  cloud:
+    stream:
+      bindings:
+        outputLog:
+          destination: myLog
+  #          contentType: text/plain      # 实体 json string 在传递的类型装换 查看 http://docs.spring
+
+  #mq
+  rabbitmq:
+    host: 192.168.2.121
+    port: 5672
+    username: lantone
+    password: lantone
+    publisher-confirms: true
+    virtual-host: /
+
+#mybatis
+mybatis-plus:
+  mapper-locations: classpath:/mapper/*Mapper.xml
+  #实体扫描,多个package用逗号或者分号分隔
+  typeAliasesPackage: com.diagbot.entity
+  global-config:
+    #刷新mapper 调试神器
+    db-config:
+      #主键类型  0:"数据库ID自增", 1:"用户输入ID",2:"全局唯一ID (数字类型唯一ID)", 3:"全局唯一ID UUID";
+      id-type: id_worker
+      #字段策略 0:"忽略判断",1:"非 NULL 判断"),2:"非空判断"
+      field-strategy: not_empty
+      #驼峰下划线转换
+      column-underline: true
+      #数据库大写下划线转换
+      #capital-mode: true
+      #刷新mapper 调试神器
+      refresh-mapper: true
+      #逻辑删除配置
+      logic-delete-value: 0
+      logic-not-delete-value: 1
+      #自定义填充策略接口实现
+      #meta-object-handler: com.baomidou.springboot.xxx
+      #自定义SQL注入器
+      #sql-injector: com.baomidou.springboot.xxx
+  configuration:
+    map-underscore-to-camel-case: true
+    cache-enabled: false
+

+ 2 - 2
config-server/src/main/resources/shared/diagbotman-service-pro.yml

@@ -7,7 +7,7 @@ spring:
     druid:
       driver-class-name: com.mysql.cj.jdbc.Driver
       platform: mysql
-      url: jdbc:mysql://192.168.2.236:3306/sys-diagbotman?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      url: jdbc:mysql://192.168.2.122:3306/sys-diagbotman?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
       username: root
       password: lantone
       # 连接池的配置信息
@@ -59,7 +59,7 @@ spring:
 
   #mq
   rabbitmq:
-    host: 192.168.2.236
+    host: 192.168.2.122
     port: 5672
     username: lantone
     password: lantone

+ 96 - 0
config-server/src/main/resources/shared/feedback-service-pre.yml

@@ -0,0 +1,96 @@
+server:
+  port: 8831
+
+# 驱动配置信息
+spring:
+  datasource:
+    druid:
+      driver-class-name: com.mysql.cj.jdbc.Driver
+      platform: mysql
+      url: jdbc:mysql://192.168.2.121:3306/sys-log?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      username: teamback
+      password: goTulmLeon
+      # 连接池的配置信息
+      # 初始化大小,最小,最大
+      initialSize: 5
+      minIdle: 5
+      maxActive: 20
+      # 配置获取连接等待超时的时间
+      maxWait: 60000
+      # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
+      timeBetweenEvictionRunsMillis: 60000
+      # 配置一个连接在池中最小生存的时间,单位是毫秒
+      minEvictableIdleTimeMillis: 300000
+      validationQuery: SELECT 1 FROM DUAL
+      testWhileIdle: true
+      testOnBorrow: false
+      testOnReturn: false
+      # 打开PSCache,并且指定每个连接上PSCache的大小
+      poolPreparedStatements: true
+      maxPoolPreparedStatementPerConnectionSize: 20
+      # 配置监控统计拦截的filters,去掉后监控界面sql无法统计,'wall'用于防火墙
+      filters.commons-log.connection-logger-name: wall,log4j
+      filter:
+        stat:
+          enabled: true
+          mergeSql: true
+          log-slow-sql: true
+          slow-sql-millis: 2000
+      #监控配置
+      web-stat-filter:
+        enabled: true
+        url-pattern: /*
+        exclusions: '*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*'
+
+      # StatViewServlet配置,说明请参考Druid Wiki,配置_StatViewServlet配置
+      stat-view-servlet:
+        enabled: true
+        url-pattern: /druid/*
+        reset-enable: false
+        login-username: root
+        login-password: root
+
+  cloud:
+    stream:
+      bindings:
+        outputLog:
+          destination: myLog
+  #          contentType: text/plain      # 实体 json string 在传递的类型装换 查看 http://docs.spring
+
+  #mq
+  rabbitmq:
+    host: 192.168.2.121
+    port: 5672
+    username: lantone
+    password: lantone
+    publisher-confirms: true
+    virtual-host: /
+
+#mybatis
+mybatis-plus:
+  mapper-locations: classpath:/mapper/*Mapper.xml
+  #实体扫描,多个package用逗号或者分号分隔
+  typeAliasesPackage: com.diagbot.entity
+  global-config:
+    #刷新mapper 调试神器
+    db-config:
+      #主键类型  0:"数据库ID自增", 1:"用户输入ID",2:"全局唯一ID (数字类型唯一ID)", 3:"全局唯一ID UUID";
+      id-type: id_worker
+      #字段策略 0:"忽略判断",1:"非 NULL 判断"),2:"非空判断"
+      field-strategy: not_empty
+      #驼峰下划线转换
+      column-underline: true
+      #数据库大写下划线转换
+      #capital-mode: true
+      #刷新mapper 调试神器
+      refresh-mapper: true
+      #逻辑删除配置
+      logic-delete-value: 0
+      logic-not-delete-value: 1
+      #自定义填充策略接口实现
+      #meta-object-handler: com.baomidou.springboot.xxx
+      #自定义SQL注入器
+      #sql-injector: com.baomidou.springboot.xxx
+  configuration:
+    map-underscore-to-camel-case: true
+    cache-enabled: false

+ 2 - 2
config-server/src/main/resources/shared/feedback-service-pro.yml

@@ -7,7 +7,7 @@ spring:
     druid:
       driver-class-name: com.mysql.cj.jdbc.Driver
       platform: mysql
-      url: jdbc:mysql://192.168.2.236:3306/sys-log?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      url: jdbc:mysql://192.168.2.122:3306/sys-log?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
       username: root
       password: lantone
       # 连接池的配置信息
@@ -59,7 +59,7 @@ spring:
 
   #mq
   rabbitmq:
-    host: 192.168.2.236
+    host: 192.168.2.122
     port: 5672
     username: lantone
     password: lantone

+ 130 - 0
config-server/src/main/resources/shared/gateway-service-pre.yml

@@ -0,0 +1,130 @@
+spring:
+  #mq
+  rabbitmq:
+    host: 192.168.2.121
+    port: 5672
+    username: lantone
+    password: lantone
+    publisher-confirms: true
+    virtual-host: /
+  cloud:
+    stream:
+      bindings:
+        outputLog:
+          destination: myLog
+    #          contentType: text/plain      # 实体 json string 在传递的类型装换 查看 http://docs.spring
+    gateway:
+      default-filters:
+      routes:
+      - id: user-service
+        uri: lb://user-service
+        predicates:
+        - Path=/api/user/**
+        filters:
+#        - SwaggerHeaderFilter
+        - StripPrefix=2
+      - id: logger-service
+        uri: lb://logger-service
+        predicates:
+        - Path=/api/log/**
+        filters:
+#        - SwaggerHeaderFilter
+        - StripPrefix=2
+      - id: bi-service
+        uri: lb://bi-service
+        predicates:
+        - Path=/api/bi/**
+        filters:
+#        - SwaggerHeaderFilter
+        - StripPrefix=2
+      - id: diagbotman-service
+        uri: lb://diagbotman-service
+        predicates:
+        - Path=/api/diagbotman/**
+        filters:
+#        - SwaggerHeaderFilter
+        - StripPrefix=2
+      - id: feedback-service
+        uri: lb://feedback-service
+        predicates:
+        - Path=/api/feedback/**
+        filters:
+#        - SwaggerHeaderFilter
+        - StripPrefix=2
+      - id: triage-service
+        uri: lb://triage-service
+        predicates:
+        - Path=/api/triage/**
+        filters:
+#        - SwaggerHeaderFilter
+        - StripPrefix=2
+      - id: icss-service
+        uri: lb://icss-service
+        predicates:
+        - Path=/api/icss/**
+        filters:
+#        - SwaggerHeaderFilter
+        - StripPrefix=2
+      - id: icssman-service
+        uri: lb://icssman-service
+        predicates:
+        - Path=/api/icssman/**
+        filters:
+#        - SwaggerHeaderFilter
+        - StripPrefix=2
+      - id: knowledgeman-service
+        uri: lb://knowledgeman-service
+        predicates:
+        - Path=/api/knowledgeman/**
+        filters:
+#        - SwaggerHeaderFilter
+        - StripPrefix=2
+      - id: prec-service
+        uri: lb://prec-service
+        predicates:
+        - Path=/api/prec/**
+        filters:
+        #        - SwaggerHeaderFilter
+        - StripPrefix=2
+      - id: tran-service
+        uri: lb://tran-service
+        predicates:
+        - Path=/api/tran/**
+        filters:
+#        - SwaggerHeaderFilter
+        - StripPrefix=2
+      - id: aipt-service
+        uri: lb://aipt-service
+        predicates:
+        - Path=/api/aipt/**
+        filters:
+#        - SwaggerHeaderFilter
+        - StripPrefix=2
+      - id: data-service
+        uri: lb://data-service
+        predicates:
+        - Path=/api/data/**
+        filters:
+#        - SwaggerHeaderFilter
+        - StripPrefix=2
+      - id: ltapi-service
+        uri: lb://ltapi-service
+        predicates:
+        - Path=/api/ltapi/**
+        filters:
+#        - SwaggerHeaderFilter
+        - StripPrefix=2
+      - id: precman-service
+        uri: lb://precman-service
+        predicates:
+        - Path=/api/precman/**
+        filters:
+#        - SwaggerHeaderFilter
+        - StripPrefix=2
+
+server:
+  port: 5050
+
+lantone:
+  product: triagett,143;icsstt,2
+
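
All routes above share the same shape: a Path predicate, a load-balanced lb:// URI resolved through the service registry, and a StripPrefix=2 filter. As a worked example (assuming standard Spring Cloud Gateway semantics; the /info endpoint below is hypothetical), StripPrefix=2 drops the first two path segments before forwarding:

      - id: user-service
        uri: lb://user-service        # resolved to a registered user-service instance
        predicates:
        - Path=/api/user/**           # matches e.g. GET /api/user/info (hypothetical endpoint)
        filters:
        - StripPrefix=2               # strips /api/user, so user-service receives GET /info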

+ 22 - 22
config-server/src/main/resources/shared/gateway-service-pro.yml

@@ -1,7 +1,7 @@
 spring:
   #mq
   rabbitmq:
-    host: 192.168.2.236
+    host: 192.168.2.122
     port: 5672
     username: lantone
     password: lantone
@@ -21,70 +21,63 @@ spring:
         predicates:
         - Path=/api/user/**
         filters:
-#        - SwaggerHeaderFilter
+        #        - SwaggerHeaderFilter
         - StripPrefix=2
       - id: logger-service
         uri: lb://logger-service
         predicates:
         - Path=/api/log/**
         filters:
-#        - SwaggerHeaderFilter
+        #        - SwaggerHeaderFilter
         - StripPrefix=2
       - id: bi-service
         uri: lb://bi-service
         predicates:
         - Path=/api/bi/**
         filters:
-#        - SwaggerHeaderFilter
+        #        - SwaggerHeaderFilter
         - StripPrefix=2
       - id: diagbotman-service
         uri: lb://diagbotman-service
         predicates:
         - Path=/api/diagbotman/**
         filters:
-#        - SwaggerHeaderFilter
+        #        - SwaggerHeaderFilter
         - StripPrefix=2
       - id: feedback-service
         uri: lb://feedback-service
         predicates:
         - Path=/api/feedback/**
         filters:
-#        - SwaggerHeaderFilter
+        #        - SwaggerHeaderFilter
         - StripPrefix=2
       - id: triage-service
         uri: lb://triage-service
         predicates:
         - Path=/api/triage/**
         filters:
-#        - SwaggerHeaderFilter
+        #        - SwaggerHeaderFilter
         - StripPrefix=2
       - id: icss-service
         uri: lb://icss-service
         predicates:
         - Path=/api/icss/**
         filters:
-#        - SwaggerHeaderFilter
+        #        - SwaggerHeaderFilter
         - StripPrefix=2
       - id: icssman-service
         uri: lb://icssman-service
         predicates:
         - Path=/api/icssman/**
         filters:
-#        - SwaggerHeaderFilter
+        #        - SwaggerHeaderFilter
         - StripPrefix=2
       - id: knowledgeman-service
         uri: lb://knowledgeman-service
         predicates:
         - Path=/api/knowledgeman/**
         filters:
-#        - SwaggerHeaderFilter
-        - StripPrefix=2
-      - id: tran-service
-        uri: lb://tran-service
-        predicates:
-        - Path=/api/tran/**
-        filters:
-#        - SwaggerHeaderFilter
+        #        - SwaggerHeaderFilter
         - StripPrefix=2
       - id: prec-service
         uri: lb://prec-service
@@ -93,38 +86,45 @@ spring:
         filters:
         #        - SwaggerHeaderFilter
         - StripPrefix=2
+      - id: tran-service
+        uri: lb://tran-service
+        predicates:
+        - Path=/api/tran/**
+        filters:
+        #        - SwaggerHeaderFilter
+        - StripPrefix=2
       - id: aipt-service
         uri: lb://aipt-service
         predicates:
         - Path=/api/aipt/**
         filters:
-#        - SwaggerHeaderFilter
+        #        - SwaggerHeaderFilter
         - StripPrefix=2
       - id: data-service
         uri: lb://data-service
         predicates:
         - Path=/api/data/**
         filters:
-#        - SwaggerHeaderFilter
+        #        - SwaggerHeaderFilter
         - StripPrefix=2
       - id: ltapi-service
         uri: lb://ltapi-service
         predicates:
         - Path=/api/ltapi/**
         filters:
-#        - SwaggerHeaderFilter
+        #        - SwaggerHeaderFilter
         - StripPrefix=2
       - id: precman-service
         uri: lb://precman-service
         predicates:
         - Path=/api/precman/**
         filters:
-#        - SwaggerHeaderFilter
+        #        - SwaggerHeaderFilter
         - StripPrefix=2
 
 server:
   port: 5050
 
 lantone:
-  product: triage,1;icss,2
+  product: triagett,143;icsstt,2
 

+ 119 - 0
config-server/src/main/resources/shared/icss-service-pre.yml

@@ -0,0 +1,119 @@
+server:
+  port: 8843
+
+# 驱动配置信息
+spring:
+  datasource:
+    druid:
+      driver-class-name: com.mysql.cj.jdbc.Driver
+      platform: mysql
+      url: jdbc:mysql://192.168.2.121:3306/sys-icss?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      username: teamback
+      password: goTulmLeon
+      # 连接池的配置信息
+      # 初始化大小,最小,最大
+      initialSize: 5
+      minIdle: 5
+      maxActive: 20
+      # 配置获取连接等待超时的时间
+      maxWait: 60000
+      # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
+      timeBetweenEvictionRunsMillis: 60000
+      # 配置一个连接在池中最小生存的时间,单位是毫秒
+      minEvictableIdleTimeMillis: 300000
+      validationQuery: SELECT 1 FROM DUAL
+      testWhileIdle: true
+      testOnBorrow: false
+      testOnReturn: false
+      # 打开PSCache,并且指定每个连接上PSCache的大小
+      poolPreparedStatements: true
+      maxPoolPreparedStatementPerConnectionSize: 20
+      # 配置监控统计拦截的filters,去掉后监控界面sql无法统计,'wall'用于防火墙
+      filters.commons-log.connection-logger-name: wall,log4j
+      filter:
+        stat:
+          enabled: true
+          mergeSql: true
+          log-slow-sql: true
+          slow-sql-millis: 2000
+      #监控配置
+      web-stat-filter:
+        enabled: true
+        url-pattern: /*
+        exclusions: '*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*'
+
+      # StatViewServlet配置,说明请参考Druid Wiki,配置_StatViewServlet配置
+      stat-view-servlet:
+        enabled: true
+        url-pattern: /druid/*
+        reset-enable: false
+        login-username: root
+        login-password: root
+
+  cloud:
+    stream:
+      bindings:
+        outputLog:
+          destination: myLog
+        #          contentType: text/plain      # 实体 json string 在传递的类型装换 查看 http://docs.spring
+        outputPoint:
+          destination: myPoint
+        #          contentType: text/plain      # 实体 json string 在传递的类型装换 查看 http://docs.spring
+        inputPoint:
+          destination: myPoint
+          group: pointReceiveGroup
+
+  #mq
+  rabbitmq:
+    host: 192.168.2.121
+    port: 5672
+    username: lantone
+    password: lantone
+    publisher-confirms: true
+    virtual-host: /
+
+  #redis
+  redis:
+    database:
+      cache: 5 # Redis缓存索引
+    host: 192.168.2.121  #Redis服务器地址
+    port: 6379 # Redis服务器连接端口
+    password: lantone # Redis服务器连接密码(默认为空)
+    lettuce:
+      pool:
+        max-active: 8 # 连接池最大连接数(使用负值表示没有限制)
+        max-idle: 5 # 连接池中的最大空闲连接
+        max-wait: -1 # 连接池最大阻塞等待时间(使用负值表示没有限制)
+        min-idle: 0 # 连接池中的最小空闲连接
+    timeout: 20000 # 连接超时时间(毫秒)
+
+#mybatis
+mybatis-plus:
+  mapper-locations: classpath:/mapper/*Mapper.xml
+  #实体扫描,多个package用逗号或者分号分隔
+  typeAliasesPackage: com.diagbot.entity
+  global-config:
+    #刷新mapper 调试神器
+    db-config:
+      #主键类型  0:"数据库ID自增", 1:"用户输入ID",2:"全局唯一ID (数字类型唯一ID)", 3:"全局唯一ID UUID";
+      id-type: id_worker
+      #字段策略 0:"忽略判断",1:"非 NULL 判断"),2:"非空判断"
+      field-strategy: not_empty
+      #驼峰下划线转换
+      column-underline: true
+      #数据库大写下划线转换
+      #capital-mode: true
+      #刷新mapper 调试神器
+      refresh-mapper: true
+      #逻辑删除配置
+      logic-delete-value: 0
+      logic-not-delete-value: 1
+      #自定义填充策略接口实现
+      #meta-object-handler: com.baomidou.springboot.xxx
+      #自定义SQL注入器
+      #sql-injector: com.baomidou.springboot.xxx
+  configuration:
+    map-underscore-to-camel-case: true
+    cache-enabled: false
+
+io.github.lvyahui8.spring.base-packages: com.diagbot.aggregate

+ 3 - 3
config-server/src/main/resources/shared/icss-service-pro.yml

@@ -7,7 +7,7 @@ spring:
     druid:
       driver-class-name: com.mysql.cj.jdbc.Driver
       platform: mysql
-      url: jdbc:mysql://192.168.2.236:3306/sys-icss?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      url: jdbc:mysql://192.168.2.122:3306/sys-icss?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
       username: root
       password: lantone
       # 连接池的配置信息
@@ -65,7 +65,7 @@ spring:
 
   #mq
   rabbitmq:
-    host: 192.168.2.236
+    host: 192.168.2.122
     port: 5672
     username: lantone
     password: lantone
@@ -76,7 +76,7 @@ spring:
   redis:
     database:
       cache: 5 # Redis缓存索引
-    host: 192.168.2.236  #Redis服务器地址
+    host: 192.168.2.122  #Redis服务器地址
     port: 6379 # Redis服务器连接端口
     password: lantone # Redis服务器连接密码(默认为空)
     lettuce:

+ 126 - 0
config-server/src/main/resources/shared/icssman-service-pre.yml

@@ -0,0 +1,126 @@
+server:
+  port: 8844
+
+# 驱动配置信息
+spring:
+  datasource:
+    druid:
+      driver-class-name: com.mysql.cj.jdbc.Driver
+      platform: mysql
+      url: jdbc:mysql://192.168.2.121:3306/sys-icss?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      username: teamback
+      password: goTulmLeon
+      # 连接池的配置信息
+      # 初始化大小,最小,最大
+      initialSize: 5
+      minIdle: 5
+      maxActive: 20
+      # 配置获取连接等待超时的时间
+      maxWait: 60000
+      # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
+      timeBetweenEvictionRunsMillis: 60000
+      # 配置一个连接在池中最小生存的时间,单位是毫秒
+      minEvictableIdleTimeMillis: 300000
+      validationQuery: SELECT 1 FROM DUAL
+      testWhileIdle: true
+      testOnBorrow: false
+      testOnReturn: false
+      # 打开PSCache,并且指定每个连接上PSCache的大小
+      poolPreparedStatements: true
+      maxPoolPreparedStatementPerConnectionSize: 20
+      # 配置监控统计拦截的filters,去掉后监控界面sql无法统计,'wall'用于防火墙
+      filters.commons-log.connection-logger-name: wall,log4j
+      filter:
+        stat:
+          enabled: true
+          mergeSql: true
+          log-slow-sql: true
+          slow-sql-millis: 2000
+      #监控配置
+      web-stat-filter:
+        enabled: true
+        url-pattern: /*
+        exclusions: '*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*'
+
+      # StatViewServlet配置,说明请参考Druid Wiki,配置_StatViewServlet配置
+      stat-view-servlet:
+        enabled: true
+        url-pattern: /druid/*
+        reset-enable: false
+        login-username: root
+        login-password: root
+
+  servlet:
+    multipart:
+      enabled: true
+      max-file-size: 1MB
+      max-request-size: 1MB
+
+  cloud:
+    stream:
+      bindings:
+        outputLog:
+          destination: myLog
+  #          contentType: text/plain      # 实体 json string 在传递的类型装换 查看 http://docs.spring
+
+  #mq
+  rabbitmq:
+    host: 192.168.2.121
+    port: 5672
+    username: lantone
+    password: lantone
+    publisher-confirms: true
+    virtual-host: /
+
+  #redis
+  redis:
+    database:
+      cache: 5 # Redis缓存索引
+    host: 192.168.2.121  #Redis服务器地址
+    port: 6379 # Redis服务器连接端口
+    password: lantone # Redis服务器连接密码(默认为空)
+    lettuce:
+      pool:
+        max-active: 8 # 连接池最大连接数(使用负值表示没有限制)
+        max-idle: 5 # 连接池中的最大空闲连接
+        max-wait: -1 # 连接池最大阻塞等待时间(使用负值表示没有限制)
+        min-idle: 0 # 连接池中的最小空闲连接
+    timeout: 20000 # 连接超时时间(毫秒)
+
+#mybatis
+mybatis-plus:
+  mapper-locations: classpath:/mapper/*Mapper.xml
+  #实体扫描,多个package用逗号或者分号分隔
+  typeAliasesPackage: com.diagbot.entity
+  global-config:
+    #刷新mapper 调试神器
+    db-config:
+      #主键类型  0:"数据库ID自增", 1:"用户输入ID",2:"全局唯一ID (数字类型唯一ID)", 3:"全局唯一ID UUID";
+      id-type: id_worker
+      #字段策略 0:"忽略判断",1:"非 NULL 判断"),2:"非空判断"
+      field-strategy: 1
+      #驼峰下划线转换
+      column-underline: true
+      #数据库大写下划线转换
+      #capital-mode: true
+      #刷新mapper 调试神器
+      refresh-mapper: true
+      #逻辑删除配置
+      logic-delete-value: 0
+      logic-not-delete-value: 1
+      #自定义填充策略接口实现
+      #meta-object-handler: com.baomidou.springboot.xxx
+      #自定义SQL注入器
+      #sql-injector: com.baomidou.springboot.xxx
+  configuration:
+    map-underscore-to-camel-case: true
+    cache-enabled: false
+
+fastdfs:
+  connect_timeout_in_seconds: 60
+  network_timeout_in_seconds: 60
+  charset: UTF-8
+  http_tracker_http_port: 8080
+  http_anti_steal_token: no
+  tracker_servers: 192.168.2.121:22122
+

+ 4 - 4
config-server/src/main/resources/shared/icssman-service-pro.yml

@@ -7,7 +7,7 @@ spring:
     druid:
       driver-class-name: com.mysql.cj.jdbc.Driver
       platform: mysql
-      url: jdbc:mysql://192.168.2.236:3306/sys-icss?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      url: jdbc:mysql://192.168.2.122:3306/sys-icss?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
       username: root
       password: lantone
       # 连接池的配置信息
@@ -65,7 +65,7 @@ spring:
 
   #mq
   rabbitmq:
-    host: 192.168.2.236
+    host: 192.168.2.122
     port: 5672
     username: lantone
     password: lantone
@@ -76,7 +76,7 @@ spring:
   redis:
     database:
       cache: 5 # Redis缓存索引
-    host: 192.168.2.236  #Redis服务器地址
+    host: 192.168.2.122  #Redis服务器地址
     port: 6379 # Redis服务器连接端口
     password: lantone # Redis服务器连接密码(默认为空)
     lettuce:
@@ -122,5 +122,5 @@ fastdfs:
   charset: UTF-8
   http_tracker_http_port: 8080
   http_anti_steal_token: no
-  tracker_servers: 192.168.2.236:22122
+  tracker_servers: 192.168.2.122:22122
 

+ 2 - 7
config-server/src/main/resources/shared/knowledgeman-service-dev.yml

@@ -122,13 +122,8 @@ mybatis-plus:
     map-underscore-to-camel-case: true
     cache-enabled: false
 
-fastdfs:
-  connect_timeout_in_seconds: 60
-  network_timeout_in_seconds: 60
-  charset: UTF-8
-  http_tracker_http_port: 8080
-  http_anti_steal_token: no
-  tracker_servers: 192.168.2.236:22122
+imageUrl:
+  prefix: http://192.168.2.236:82
 
 neo:
   server:

+ 2 - 7
config-server/src/main/resources/shared/knowledgeman-service-local.yml

@@ -122,13 +122,8 @@ mybatis-plus:
     map-underscore-to-camel-case: true
     cache-enabled: false
 
-fastdfs:
-  connect_timeout_in_seconds: 60
-  network_timeout_in_seconds: 60
-  charset: UTF-8
-  http_tracker_http_port: 8080
-  http_anti_steal_token: no
-  tracker_servers: 192.168.2.236:22122
+imageUrl:
+  prefix: http://192.168.2.236:82
 
 neo:
   server:

+ 134 - 0
config-server/src/main/resources/shared/knowledgeman-service-pre.yml

@@ -0,0 +1,134 @@
+server:
+  port: 8822
+
+# 驱动配置信息
+spring:
+  datasource:
+    druid:
+      driver-class-name: com.mysql.cj.jdbc.Driver
+      platform: mysql
+      url: jdbc:mysql://192.168.2.121:3306/med?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false&allowMultiQueries=true
+      username: teamback
+      password: goTulmLeon
+      # 连接池的配置信息
+      # 初始化大小,最小,最大
+      initialSize: 5
+      minIdle: 5
+      maxActive: 20
+      # 配置获取连接等待超时的时间
+      maxWait: 60000
+      # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
+      timeBetweenEvictionRunsMillis: 60000
+      # 配置一个连接在池中最小生存的时间,单位是毫秒
+      minEvictableIdleTimeMillis: 300000
+      validationQuery: SELECT 1 FROM DUAL
+      testWhileIdle: true
+      testOnBorrow: false
+      testOnReturn: false
+      # 打开PSCache,并且指定每个连接上PSCache的大小
+      poolPreparedStatements: true
+      maxPoolPreparedStatementPerConnectionSize: 20
+      # 配置监控统计拦截的filters,去掉后监控界面sql无法统计,'wall'用于防火墙
+      filters.commons-log.connection-logger-name: wall,log4j
+      filter:
+        stat:
+          enabled: true
+          mergeSql: true
+          log-slow-sql: true
+          slow-sql-millis: 2000
+      #监控配置
+      web-stat-filter:
+        enabled: true
+        url-pattern: /*
+        exclusions: '*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*'
+
+      # StatViewServlet配置,说明请参考Druid Wiki,配置_StatViewServlet配置
+      stat-view-servlet:
+        enabled: true
+        url-pattern: /druid/*
+        reset-enable: false
+        login-username: root
+        login-password: root
+
+  servlet:
+    multipart:
+      enabled: true
+      max-file-size: 5MB
+      max-request-size: 5MB
+
+  cloud:
+    stream:
+      bindings:
+        outputLog:
+          destination: myLog
+        #          contentType: text/plain      # 实体 json string 在传递的类型装换 查看 http://docs.spring
+        outputWord:
+          destination: myWord
+        #          contentType: text/plain      # 实体 json string 在传递的类型装换 查看 http://docs.spring
+        inputWord:
+          destination: myWord
+          group: wordReceiveGroup
+
+  #mq
+  rabbitmq:
+    host: 192.168.2.121
+    port: 5672
+    username: lantone
+    password: lantone
+    publisher-confirms: true
+    virtual-host: /
+
+  #redis
+  redis:
+    database:
+      cache: 6 # Redis缓存索引
+    host: 192.168.2.121  #Redis服务器地址
+    port: 6379 # Redis服务器连接端口
+    password: lantone # Redis服务器连接密码(默认为空)
+    lettuce:
+      pool:
+        max-active: 8 # 连接池最大连接数(使用负值表示没有限制)
+        max-idle: 5 # 连接池中的最大空闲连接
+        max-wait: -1 # 连接池最大阻塞等待时间(使用负值表示没有限制)
+        min-idle: 0 # 连接池中的最小空闲连接
+    timeout: 20000 # 连接超时时间(毫秒)
+
+#mybatis
+mybatis-plus:
+  mapper-locations: classpath:/mapper/*Mapper.xml
+  #实体扫描,多个package用逗号或者分号分隔
+  typeAliasesPackage: com.diagbot.entity
+  global-config:
+    #刷新mapper 调试神器
+    db-config:
+      #主键类型  0:"数据库ID自增", 1:"用户输入ID",2:"全局唯一ID (数字类型唯一ID)", 3:"全局唯一ID UUID";
+      id-type: id_worker
+      #字段策略 0:"忽略判断",1:"非 NULL 判断"),2:"非空判断"
+      field-strategy: 1
+      #驼峰下划线转换
+      column-underline: true
+      #数据库大写下划线转换
+      #capital-mode: true
+      #刷新mapper 调试神器
+      refresh-mapper: true
+      #逻辑删除配置
+      logic-delete-value: 0
+      logic-not-delete-value: 1
+      #自定义填充策略接口实现
+      #meta-object-handler: com.baomidou.springboot.xxx
+      #自定义SQL注入器
+      #sql-injector: com.baomidou.springboot.xxx
+  configuration:
+    map-underscore-to-camel-case: true
+    cache-enabled: false
+
+imageUrl:
+  prefix: http://192.168.2.121:82
+
+neo:
+  server:
+    address: http://192.168.2.186:5004
+
+ai:
+  server:
+    address: http://192.168.2.186:5008
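neo.server.address and ai.server.address above are plain custom properties pointing at the pre-environment graph and AI backends. A minimal sketch of how such a property can be consumed is shown below; the client class and method are illustrative assumptions, not taken from this repository.

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.web.client.RestTemplate;

// Illustrative consumer of neo.server.address; the real caller in this code base may differ.
@Component
public class NeoGraphClient {

    @Value("${neo.server.address}")
    private String neoServerAddress; // http://192.168.2.186:5004 in the pre profile

    private final RestTemplate restTemplate = new RestTemplate();

    public String get(String path) {
        return restTemplate.getForObject(neoServerAddress + path, String.class);
    }
}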

+ 7 - 16
config-server/src/main/resources/shared/knowledgeman-service-pro.yml

@@ -7,7 +7,7 @@ spring:
     druid:
       driver-class-name: com.mysql.cj.jdbc.Driver
       platform: mysql
-      url: jdbc:mysql://192.168.2.236:3306/sys-log?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false&allowMultiQueries=true
+      url: jdbc:mysql://192.168.2.122:3306/med?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false&allowMultiQueries=true
       username: root
       password: lantone
       # 连接池的配置信息
@@ -71,7 +71,7 @@ spring:
 
   #mq
   rabbitmq:
-    host: 192.168.2.236
+    host: 192.168.2.122
     port: 5672
     username: lantone
     password: lantone
@@ -82,7 +82,7 @@ spring:
   redis:
     database:
       cache: 6 # Redis缓存索引
-    host: 192.168.2.236  #Redis服务器地址
+    host: 192.168.2.122  #Redis服务器地址
     port: 6379 # Redis服务器连接端口
     password: lantone # Redis服务器连接密码(默认为空)
     lettuce:
@@ -122,22 +122,13 @@ mybatis-plus:
     map-underscore-to-camel-case: true
     cache-enabled: false
 
-fastdfs:
-  connect_timeout_in_seconds: 60
-  network_timeout_in_seconds: 60
-  charset: UTF-8
-  http_tracker_http_port: 8080
-  http_anti_steal_token: no
-  tracker_servers: 192.168.2.236:22122
+imageUrl:
+  prefix: http://192.168.2.122:82
 
 neo:
   server:
-    address: http://192.168.2.241:5004
+    address: http://192.168.2.123:5004
 
 ai:
   server:
-    address: http://192.168.2.234:5008
-
-nlp:
-  server:
-    address: http://192.168.2.234:5002
+    address: http://192.168.2.123:5008
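As in the other knowledgeman profiles, the fastdfs tracker block is dropped here in favour of a single imageUrl.prefix, so stored image paths are now resolved against a plain HTTP prefix instead of going through a FastDFS tracker client. A hedged sketch of consuming the new property (class and method names invented for illustration):

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

// Hypothetical helper showing the intent of imageUrl.prefix; not actual repository code.
@Component
public class ImageUrlResolver {

    @Value("${imageUrl.prefix}")
    private String prefix; // http://192.168.2.122:82 in the pro profile

    /** Turns a stored relative file path into an absolute download URL. */
    public String toAbsoluteUrl(String relativePath) {
        return relativePath.startsWith("/") ? prefix + relativePath : prefix + "/" + relativePath;
    }
}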

+ 2 - 7
config-server/src/main/resources/shared/knowledgeman-service-test.yml

@@ -122,13 +122,8 @@ mybatis-plus:
     map-underscore-to-camel-case: true
     cache-enabled: false
 
-fastdfs:
-  connect_timeout_in_seconds: 60
-  network_timeout_in_seconds: 60
-  charset: UTF-8
-  http_tracker_http_port: 8080
-  http_anti_steal_token: no
-  tracker_servers: 192.168.2.241:22122
+imageUrl:
+  prefix: http://192.168.2.241:82
 
 neo:
   server:

+ 100 - 0
config-server/src/main/resources/shared/logger-service-pre.yml

@@ -0,0 +1,100 @@
+server:
+  port: 9997
+
+# 驱动配置信息
+spring:
+  datasource:
+    druid:
+      driver-class-name: com.mysql.cj.jdbc.Driver
+      platform: mysql
+      url: jdbc:mysql://192.168.2.121:3306/sys-log?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      username: teamback
+      password: goTulmLeon
+      # 连接池的配置信息
+      # 初始化大小,最小,最大
+      initialSize: 5
+      minIdle: 5
+      maxActive: 20
+      # 配置获取连接等待超时的时间
+      maxWait: 60000
+      # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
+      timeBetweenEvictionRunsMillis: 60000
+      # 配置一个连接在池中最小生存的时间,单位是毫秒
+      minEvictableIdleTimeMillis: 300000
+      validationQuery: SELECT 1 FROM DUAL
+      testWhileIdle: true
+      testOnBorrow: false
+      testOnReturn: false
+      # 打开PSCache,并且指定每个连接上PSCache的大小
+      poolPreparedStatements: true
+      maxPoolPreparedStatementPerConnectionSize: 20
+      # 配置监控统计拦截的filters,去掉后监控界面sql无法统计,'wall'用于防火墙
+      filters.commons-log.connection-logger-name: wall,log4j
+      filter:
+        stat:
+          enabled: true
+          mergeSql: true
+          log-slow-sql: true
+          slow-sql-millis: 2000
+      #监控配置
+      web-stat-filter:
+        enabled: true
+        url-pattern: /*
+        exclusions: '*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*'
+
+      # StatViewServlet配置,说明请参考Druid Wiki,配置_StatViewServlet配置
+      stat-view-servlet:
+        enabled: true
+        url-pattern: /druid/*
+        reset-enable: false
+        login-username: root
+        login-password: root
+
+  cloud:
+    stream:
+      bindings:
+        outputLog:
+          destination: myLog
+        #          contentType: text/plain      # 实体 json string 在传递的类型装换 查看 http://docs.spring
+        inputLog:
+          destination: myLog
+          group: logReceiveGroup     # 具体分组 对应 MQ 是 队列名称 并且持久化队列
+
+  #mq
+  rabbitmq:
+    host: 192.168.2.121
+    port: 5672
+    username: lantone
+    password: lantone
+    publisher-confirms: true
+    virtual-host: /
+
+#mybatis
+mybatis-plus:
+  mapper-locations: classpath:/mapper/*Mapper.xml
+  #实体扫描,多个package用逗号或者分号分隔
+  typeAliasesPackage: com.diagbot.entity
+  global-config:
+    #刷新mapper 调试神器
+    db-config:
+      #主键类型  0:"数据库ID自增", 1:"用户输入ID",2:"全局唯一ID (数字类型唯一ID)", 3:"全局唯一ID UUID";
+      id-type: id_worker
+      #字段策略 0:"忽略判断",1:"非 NULL 判断"),2:"非空判断"
+      field-strategy: not_empty
+      #驼峰下划线转换
+      column-underline: true
+      #数据库大写下划线转换
+      #capital-mode: true
+      #刷新mapper 调试神器
+      refresh-mapper: true
+      #逻辑删除配置
+      logic-delete-value: 0
+      logic-not-delete-value: 1
+      #自定义填充策略接口实现
+      #meta-object-handler: com.baomidou.springboot.xxx
+      #自定义SQL注入器
+      #sql-injector: com.baomidou.springboot.xxx
+  configuration:
+    map-underscore-to-camel-case: true
+    cache-enabled: false
+
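Note that logic-delete-value: 0 / logic-not-delete-value: 1 above inverts the usual mybatis-plus default of 1/0, so here 0 marks a deleted row and 1 a live one. A hedged entity sketch matching that convention (the table and field names are invented; real entities live under com.diagbot.entity per typeAliasesPackage):

import com.baomidou.mybatisplus.annotation.TableLogic;
import com.baomidou.mybatisplus.annotation.TableName;

// Invented entity for illustration only.
@TableName("sys_log")
public class SysLogRecord {

    private Long id;

    // Picks up the global logic-delete settings above: 0 = logically deleted, 1 = live.
    @TableLogic
    private Integer isDeleted;
}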

+ 2 - 2
config-server/src/main/resources/shared/logger-service-pro.yml

@@ -7,7 +7,7 @@ spring:
     druid:
       driver-class-name: com.mysql.cj.jdbc.Driver
       platform: mysql
-      url: jdbc:mysql://192.168.2.236:3306/sys-log?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      url: jdbc:mysql://192.168.2.122:3306/sys-log?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
       username: root
       password: lantone
       # 连接池的配置信息
@@ -62,7 +62,7 @@ spring:
 
   #mq
   rabbitmq:
-    host: 192.168.2.236
+    host: 192.168.2.122
     port: 5672
     username: lantone
     password: lantone

+ 20 - 0
config-server/src/main/resources/shared/ltapi-service-pre.yml

@@ -0,0 +1,20 @@
+server:
+  port: 8824
+
+# 驱动配置信息
+spring:
+  cloud:
+    stream:
+      bindings:
+        outputLog:
+          destination: myLog
+  #          contentType: text/plain      # 实体 json string 在传递的类型装换 查看 http://docs.spring
+
+  #mq
+  rabbitmq:
+    host: 192.168.2.121
+    port: 5672
+    username: lantone
+    password: lantone
+    publisher-confirms: true
+    virtual-host: /

+ 1 - 1
config-server/src/main/resources/shared/ltapi-service-pro.yml

@@ -12,7 +12,7 @@ spring:
 
   #mq
   rabbitmq:
-    host: 192.168.2.236
+    host: 192.168.2.122
     port: 5672
     username: lantone
     password: lantone

+ 25 - 0
config-server/src/main/resources/shared/monitor-service-pre.yml

@@ -0,0 +1,25 @@
+server:
+  port: 8766
+#security.basic.enabled: false
+turbine:
+  aggregator:
+    clusterConfig: default
+  appConfig: user-service , logger-service
+  clusterNameExpression: new String("default")
+  combine-host: true
+  instanceUrlSuffix:
+    default: actuator/hystrix.stream
+
+#mq
+spring:
+  rabbitmq:
+    host: 192.168.2.121
+    port: 5672
+    username: lantone
+    password: lantone
+    publisher-confirms: true
+    virtual-host: /
+
+#management:
+#  security:
+#    enabled: false
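turbine.appConfig above lists the services whose actuator/hystrix.stream endpoints get aggregated into a single dashboard stream. For context, a sketch of the application class that typically accompanies such a config; the class name is an assumption, not taken from this repository.

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.netflix.turbine.EnableTurbine;

// Assumed entry point, shown only to indicate where the turbine.* settings take effect.
@SpringBootApplication
@EnableTurbine
public class MonitorServiceApplication {
    public static void main(String[] args) {
        SpringApplication.run(MonitorServiceApplication.class, args);
    }
}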

+ 1 - 1
config-server/src/main/resources/shared/monitor-service-pro.yml

@@ -13,7 +13,7 @@ turbine:
 #mq
 spring:
   rabbitmq:
-    host: 192.168.2.236
+    host: 192.168.2.122
     port: 5672
     username: lantone
     password: lantone

+ 5 - 9
config-server/src/main/resources/shared/prec-service-dev.yml

@@ -95,13 +95,9 @@ mybatis-plus:
     map-underscore-to-camel-case: true
     cache-enabled: false
 
-fastdfs:
-  connect_timeout_in_seconds: 60
-  network_timeout_in_seconds: 60
-  charset: UTF-8
-  http_tracker_http_port: 8080
-  http_anti_steal_token: no
-  tracker_servers: 192.168.2.236:22122
-
 imageUrl:
-  prefix: http://192.168.2.236:82
+  prefix: http://192.168.2.236:82
+
+io.github.lvyahui8.spring:
+  base-packages: com.diagbot.aggregate
+  thread-number: 12
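io.github.lvyahui8.spring is the configuration prefix of the spring-boot-data-aggregator starter: base-packages tells it where to scan for aggregation methods and thread-number sizes its async pool. The rough sketch below follows the annotation style described in that library's README; the package paths, annotation usage, and every name here are recalled from that documentation and should be checked against the version this project actually pulls in.

import io.github.lvyahui8.spring.annotation.DataConsumer;
import io.github.lvyahui8.spring.annotation.DataProvider;

import org.springframework.stereotype.Component;

// Illustrative aggregate under com.diagbot.aggregate; not actual repository code.
@Component
public class PatientPageAggregate {

    // Both dependencies are fetched in parallel on the aggregator's thread pool,
    // then merged here.
    @DataProvider("patientPage")
    public String assemble(@DataConsumer("patientInfo") String patientInfo,
                           @DataConsumer("recentVisits") String recentVisits) {
        return patientInfo + recentVisits;
    }
}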

+ 5 - 9
config-server/src/main/resources/shared/prec-service-local.yml

@@ -95,13 +95,9 @@ mybatis-plus:
     map-underscore-to-camel-case: true
     cache-enabled: false
 
-fastdfs:
-  connect_timeout_in_seconds: 60
-  network_timeout_in_seconds: 60
-  charset: UTF-8
-  http_tracker_http_port: 8080
-  http_anti_steal_token: no
-  tracker_servers: 192.168.2.236:22122
-
 imageUrl:
-  prefix: http://192.168.2.236:82
+  prefix: http://192.168.2.236:82
+
+io.github.lvyahui8.spring:
+  base-packages: com.diagbot.aggregate
+  thread-number: 12

+ 103 - 0
config-server/src/main/resources/shared/prec-service-pre.yml

@@ -0,0 +1,103 @@
+server:
+  port: 8849
+
+# 驱动配置信息
+spring:
+  datasource:
+    druid:
+      driver-class-name: com.mysql.cj.jdbc.Driver
+      platform: mysql
+      url: jdbc:mysql://192.168.2.121:3306/sys-prec?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      username: teamback
+      password: goTulmLeon
+      # 连接池的配置信息
+      # 初始化大小,最小,最大
+      initialSize: 5
+      minIdle: 5
+      maxActive: 20
+      # 配置获取连接等待超时的时间
+      maxWait: 60000
+      # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
+      timeBetweenEvictionRunsMillis: 60000
+      # 配置一个连接在池中最小生存的时间,单位是毫秒
+      minEvictableIdleTimeMillis: 300000
+      validationQuery: SELECT 1 FROM DUAL
+      testWhileIdle: true
+      testOnBorrow: false
+      testOnReturn: false
+      # 打开PSCache,并且指定每个连接上PSCache的大小
+      poolPreparedStatements: true
+      maxPoolPreparedStatementPerConnectionSize: 20
+      # 配置监控统计拦截的filters,去掉后监控界面sql无法统计,'wall'用于防火墙
+      filters.commons-log.connection-logger-name: wall,log4j
+      filter:
+        stat:
+          enabled: true
+          mergeSql: true
+          log-slow-sql: true
+          slow-sql-millis: 2000
+      #监控配置
+      web-stat-filter:
+        enabled: true
+        url-pattern: /*
+        exclusions: '*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*'
+
+      # StatViewServlet配置,说明请参考Druid Wiki,配置_StatViewServlet配置
+      stat-view-servlet:
+        enabled: true
+        url-pattern: /druid/*
+        reset-enable: false
+        login-username: root
+        login-password: root
+
+  cloud:
+    stream:
+      bindings:
+        outputLog:
+          destination: myLog
+  #          contentType: text/plain      # 实体 json string 在传递的类型装换 查看 http://docs.spring
+
+  #mq
+  rabbitmq:
+    host: 192.168.2.121
+    port: 5672
+    username: lantone
+    password: lantone
+    publisher-confirms: true
+    virtual-host: /
+
+#mybatis
+mybatis-plus:
+  mapper-locations: classpath:/mapper/*Mapper.xml
+  #实体扫描,多个package用逗号或者分号分隔
+  typeAliasesPackage: com.diagbot.entity
+  global-config:
+    #刷新mapper 调试神器
+    db-config:
+      #主键类型  0:"数据库ID自增", 1:"用户输入ID",2:"全局唯一ID (数字类型唯一ID)", 3:"全局唯一ID UUID";
+      id-type: id_worker
+      #字段策略 0:"忽略判断",1:"非 NULL 判断"),2:"非空判断"
+      field-strategy: not_empty
+      #驼峰下划线转换
+      column-underline: true
+      #数据库大写下划线转换
+      #capital-mode: true
+      #刷新mapper 调试神器
+      refresh-mapper: true
+      #逻辑删除配置
+      logic-delete-value: 0
+      logic-not-delete-value: 1
+      #自定义填充策略接口实现
+      #meta-object-handler: com.baomidou.springboot.xxx
+      #自定义SQL注入器
+      #sql-injector: com.baomidou.springboot.xxx
+  configuration:
+    map-underscore-to-camel-case: true
+    cache-enabled: false
+
+imageUrl:
+  prefix: http://192.168.2.121:82
+
+io.github.lvyahui8.spring:
+  base-packages: com.diagbot.aggregate
+  thread-number: 12

+ 7 - 11
config-server/src/main/resources/shared/prec-service-pro.yml

@@ -7,7 +7,7 @@ spring:
     druid:
       driver-class-name: com.mysql.cj.jdbc.Driver
       platform: mysql
-      url: jdbc:mysql://192.168.2.236:3306/sys-prec?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      url: jdbc:mysql://192.168.2.122:3306/sys-prec?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
       username: root
       password: lantone
       # 连接池的配置信息
@@ -59,7 +59,7 @@ spring:
 
   #mq
   rabbitmq:
-    host: 192.168.2.236
+    host: 192.168.2.122
     port: 5672
     username: lantone
     password: lantone
@@ -95,13 +95,9 @@ mybatis-plus:
     map-underscore-to-camel-case: true
     cache-enabled: false
 
-fastdfs:
-  connect_timeout_in_seconds: 60
-  network_timeout_in_seconds: 60
-  charset: UTF-8
-  http_tracker_http_port: 8080
-  http_anti_steal_token: no
-  tracker_servers: 192.168.2.236:22122
-
 imageUrl:
-  prefix: http://192.168.2.236:82
+  prefix: http://192.168.2.122:82
+
+io.github.lvyahui8.spring:
+  base-packages: com.diagbot.aggregate
+  thread-number: 12

+ 5 - 10
config-server/src/main/resources/shared/prec-service-test.yml

@@ -95,14 +95,9 @@ mybatis-plus:
     map-underscore-to-camel-case: true
     cache-enabled: false
 
-
-fastdfs:
-  connect_timeout_in_seconds: 60
-  network_timeout_in_seconds: 60
-  charset: UTF-8
-  http_tracker_http_port: 8080
-  http_anti_steal_token: no
-  tracker_servers: 192.168.2.241:22122
-
 imageUrl:
-  prefix: http://192.168.2.241:82
+  prefix: http://192.168.2.241:82
+
+io.github.lvyahui8.spring:
+  base-packages: com.diagbot.aggregate
+  thread-number: 12

+ 2 - 7
config-server/src/main/resources/shared/precman-service-dev.yml

@@ -101,10 +101,5 @@ mybatis-plus:
     map-underscore-to-camel-case: true
     cache-enabled: false
 
-fastdfs:
-  connect_timeout_in_seconds: 60
-  network_timeout_in_seconds: 60
-  charset: UTF-8
-  http_tracker_http_port: 8080
-  http_anti_steal_token: no
-  tracker_servers: 192.168.2.236:22122
+imageUrl:
+  prefix: http://192.168.2.236:82

+ 2 - 7
config-server/src/main/resources/shared/precman-service-local.yml

@@ -101,10 +101,5 @@ mybatis-plus:
     map-underscore-to-camel-case: true
     cache-enabled: false
 
-fastdfs:
-  connect_timeout_in_seconds: 60
-  network_timeout_in_seconds: 60
-  charset: UTF-8
-  http_tracker_http_port: 8080
-  http_anti_steal_token: no
-  tracker_servers: 192.168.2.236:22122
+imageUrl:
+  prefix: http://192.168.2.236:82

+ 105 - 0
config-server/src/main/resources/shared/precman-service-pre.yml

@@ -0,0 +1,105 @@
+server:
+  port: 8850
+
+# 驱动配置信息
+spring:
+  datasource:
+    druid:
+      driver-class-name: com.mysql.cj.jdbc.Driver
+      platform: mysql
+      url: jdbc:mysql://192.168.2.121:3306/sys-prec?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      username: teamback
+      password: goTulmLeon
+      # 连接池的配置信息
+      # 初始化大小,最小,最大
+      initialSize: 5
+      minIdle: 5
+      maxActive: 20
+      # 配置获取连接等待超时的时间
+      maxWait: 60000
+      # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
+      timeBetweenEvictionRunsMillis: 60000
+      # 配置一个连接在池中最小生存的时间,单位是毫秒
+      minEvictableIdleTimeMillis: 300000
+      validationQuery: SELECT 1 FROM DUAL
+      testWhileIdle: true
+      testOnBorrow: false
+      testOnReturn: false
+      # 打开PSCache,并且指定每个连接上PSCache的大小
+      poolPreparedStatements: true
+      maxPoolPreparedStatementPerConnectionSize: 20
+      # 配置监控统计拦截的filters,去掉后监控界面sql无法统计,'wall'用于防火墙
+      filters.commons-log.connection-logger-name: wall,log4j
+      filter:
+        stat:
+          enabled: true
+          mergeSql: true
+          log-slow-sql: true
+          slow-sql-millis: 2000
+      #监控配置
+      web-stat-filter:
+        enabled: true
+        url-pattern: /*
+        exclusions: '*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*'
+
+      # StatViewServlet配置,说明请参考Druid Wiki,配置_StatViewServlet配置
+      stat-view-servlet:
+        enabled: true
+        url-pattern: /druid/*
+        reset-enable: false
+        login-username: root
+        login-password: root
+
+  servlet:
+    multipart:
+      enabled: true
+      max-file-size: 5MB
+      max-request-size: 5MB
+
+  cloud:
+    stream:
+      bindings:
+        outputLog:
+          destination: myLog
+  #          contentType: text/plain      # 实体 json string 在传递的类型装换 查看 http://docs.spring
+
+  #mq
+  rabbitmq:
+    host: 192.168.2.121
+    port: 5672
+    username: lantone
+    password: lantone
+    publisher-confirms: true
+    virtual-host: /
+
+#mybatis
+mybatis-plus:
+  mapper-locations: classpath:/mapper/*Mapper.xml
+  #实体扫描,多个package用逗号或者分号分隔
+  typeAliasesPackage: com.diagbot.entity
+  global-config:
+    #刷新mapper 调试神器
+    db-config:
+      #主键类型  0:"数据库ID自增", 1:"用户输入ID",2:"全局唯一ID (数字类型唯一ID)", 3:"全局唯一ID UUID";
+      id-type: id_worker
+      #字段策略 0:"忽略判断",1:"非 NULL 判断"),2:"非空判断"
+      field-strategy: 1
+      #驼峰下划线转换
+      column-underline: true
+      #数据库大写下划线转换
+      #capital-mode: true
+      #刷新mapper 调试神器
+      refresh-mapper: true
+      #逻辑删除配置
+      logic-delete-value: 0
+      logic-not-delete-value: 1
+      #自定义填充策略接口实现
+      #meta-object-handler: com.baomidou.springboot.xxx
+      #自定义SQL注入器
+      #sql-injector: com.baomidou.springboot.xxx
+  configuration:
+    map-underscore-to-camel-case: true
+    cache-enabled: false
+
+imageUrl:
+  prefix: http://192.168.2.121:82

+ 4 - 9
config-server/src/main/resources/shared/precman-service-pro.yml

@@ -7,7 +7,7 @@ spring:
     druid:
       driver-class-name: com.mysql.cj.jdbc.Driver
       platform: mysql
-      url: jdbc:mysql://192.168.2.236:3306/sys-prec?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      url: jdbc:mysql://192.168.2.122:3306/sys-prec?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
       username: root
       password: lantone
       # 连接池的配置信息
@@ -65,7 +65,7 @@ spring:
 
   #mq
   rabbitmq:
-    host: 192.168.2.236
+    host: 192.168.2.122
     port: 5672
     username: lantone
     password: lantone
@@ -101,10 +101,5 @@ mybatis-plus:
     map-underscore-to-camel-case: true
     cache-enabled: false
 
-fastdfs:
-  connect_timeout_in_seconds: 60
-  network_timeout_in_seconds: 60
-  charset: UTF-8
-  http_tracker_http_port: 8080
-  http_anti_steal_token: no
-  tracker_servers: 192.168.2.236:22122
+imageUrl:
+  prefix: http://192.168.2.122:82

+ 2 - 7
config-server/src/main/resources/shared/precman-service-test.yml

@@ -101,10 +101,5 @@ mybatis-plus:
     map-underscore-to-camel-case: true
     cache-enabled: false
 
-fastdfs:
-  connect_timeout_in_seconds: 60
-  network_timeout_in_seconds: 60
-  charset: UTF-8
-  http_tracker_http_port: 8080
-  http_anti_steal_token: no
-  tracker_servers: 192.168.2.241:22122
+imageUrl:
+  prefix: http://192.168.2.241:82

+ 112 - 0
config-server/src/main/resources/shared/tran-service-pre.yml

@@ -0,0 +1,112 @@
+server:
+  port: 8825
+
+# 驱动配置信息
+spring:
+  datasource:
+    druid:
+      driver-class-name: com.mysql.cj.jdbc.Driver
+      platform: mysql
+      url: jdbc:mysql://192.168.2.121:3306/sys-tran?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      username: teamback
+      password: goTulmLeon
+      # 连接池的配置信息
+      # 初始化大小,最小,最大
+      initialSize: 5
+      minIdle: 5
+      maxActive: 20
+      # 配置获取连接等待超时的时间
+      maxWait: 60000
+      # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
+      timeBetweenEvictionRunsMillis: 60000
+      # 配置一个连接在池中最小生存的时间,单位是毫秒
+      minEvictableIdleTimeMillis: 300000
+      validationQuery: SELECT 1 FROM DUAL
+      testWhileIdle: true
+      testOnBorrow: false
+      testOnReturn: false
+      # 打开PSCache,并且指定每个连接上PSCache的大小
+      poolPreparedStatements: true
+      maxPoolPreparedStatementPerConnectionSize: 20
+      # 配置监控统计拦截的filters,去掉后监控界面sql无法统计,'wall'用于防火墙
+      filters.commons-log.connection-logger-name: wall,log4j
+      filter:
+        stat:
+          enabled: true
+          mergeSql: true
+          log-slow-sql: true
+          slow-sql-millis: 2000
+      #监控配置
+      web-stat-filter:
+        enabled: true
+        url-pattern: /*
+        exclusions: '*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*'
+
+      # StatViewServlet配置,说明请参考Druid Wiki,配置_StatViewServlet配置
+      stat-view-servlet:
+        enabled: true
+        url-pattern: /druid/*
+        reset-enable: false
+        login-username: root
+        login-password: root
+
+  cloud:
+    stream:
+      bindings:
+        outputLog:
+          destination: myLog
+  #          contentType: text/plain      # 实体 json string 在传递的类型装换 查看 http://docs.spring
+
+  #mq
+  rabbitmq:
+    host: 192.168.2.121
+    port: 5672
+    username: lantone
+    password: lantone
+    publisher-confirms: true
+    virtual-host: /
+
+  #redis
+  redis:
+    database:
+      mr: 7 # Redis病历索引
+    host: 192.168.2.121  #Redis服务器地址
+    port: 6379 # Redis服务器连接端口
+    password: lantone # Redis服务器连接密码(默认为空)
+    lettuce:
+      pool:
+        max-active: 8 # 连接池最大连接数(使用负值表示没有限制)
+        max-idle: 5 # 连接池中的最大空闲连接
+        max-wait: -1 # 连接池最大阻塞等待时间(使用负值表示没有限制)
+        min-idle: 0 # 连接池中的最小空闲连接
+    timeout: 20000 # 连接超时时间(毫秒)
+
+#mybatis
+mybatis-plus:
+  mapper-locations: classpath:/mapper/*Mapper.xml
+  #实体扫描,多个package用逗号或者分号分隔
+  typeAliasesPackage: com.diagbot.entity
+  global-config:
+    #刷新mapper 调试神器
+    db-config:
+      #主键类型  0:"数据库ID自增", 1:"用户输入ID",2:"全局唯一ID (数字类型唯一ID)", 3:"全局唯一ID UUID";
+      id-type: id_worker
+      #字段策略 0:"忽略判断",1:"非 NULL 判断"),2:"非空判断"
+      field-strategy: not_empty
+      #驼峰下划线转换
+      column-underline: true
+      #数据库大写下划线转换
+      #capital-mode: true
+      #刷新mapper 调试神器
+      refresh-mapper: true
+      #逻辑删除配置
+      logic-delete-value: 0
+      logic-not-delete-value: 1
+      #自定义填充策略接口实现
+      #meta-object-handler: com.baomidou.springboot.xxx
+      #自定义SQL注入器
+      #sql-injector: com.baomidou.springboot.xxx
+  configuration:
+    map-underscore-to-camel-case: true
+    cache-enabled: false
+

+ 3 - 3
config-server/src/main/resources/shared/tran-service-pro.yml

@@ -7,7 +7,7 @@ spring:
     druid:
       driver-class-name: com.mysql.cj.jdbc.Driver
       platform: mysql
-      url: jdbc:mysql://192.168.2.236:3306/sys-tran?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      url: jdbc:mysql://192.168.2.122:3306/sys-tran?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
       username: root
       password: lantone
       # 连接池的配置信息
@@ -59,7 +59,7 @@ spring:
 
   #mq
   rabbitmq:
-    host: 192.168.2.236
+    host: 192.168.2.122
     port: 5672
     username: lantone
     password: lantone
@@ -70,7 +70,7 @@ spring:
   redis:
     database:
       mr: 7 # Redis病历索引
-    host: 192.168.2.236  #Redis服务器地址
+    host: 192.168.2.122  #Redis服务器地址
     port: 6379 # Redis服务器连接端口
     password: lantone # Redis服务器连接密码(默认为空)
     lettuce:

+ 20 - 0
config-server/src/main/resources/shared/triage-service-pre.yml

@@ -0,0 +1,20 @@
+server:
+  port: 8842
+
+# 驱动配置信息
+spring:
+  cloud:
+    stream:
+      bindings:
+        outputLog:
+          destination: myLog
+  #          contentType: text/plain      # 实体 json string 在传递的类型装换 查看 http://docs.spring
+
+  #mq
+  rabbitmq:
+    host: 192.168.2.121
+    port: 5672
+    username: lantone
+    password: lantone
+    publisher-confirms: true
+    virtual-host: /

+ 1 - 1
config-server/src/main/resources/shared/triage-service-pro.yml

@@ -12,7 +12,7 @@ spring:
 
   #mq
   rabbitmq:
-    host: 192.168.2.236
+    host: 192.168.2.122
     port: 5672
     username: lantone
     password: lantone

+ 89 - 0
config-server/src/main/resources/shared/uaa-service-pre.yml

@@ -0,0 +1,89 @@
+# 驱动配置信息
+spring:
+  datasource:
+    druid:
+      driver-class-name: com.mysql.cj.jdbc.Driver
+      platform: mysql
+      url: jdbc:mysql://192.168.2.121:3306/sys-user?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      username: teamback
+      password: goTulmLeon
+      # 连接池的配置信息
+      # 初始化大小,最小,最大
+      initialSize: 5
+      minIdle: 5
+      maxActive: 20
+      # 配置获取连接等待超时的时间
+      maxWait: 60000
+      # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
+      timeBetweenEvictionRunsMillis: 60000
+      # 配置一个连接在池中最小生存的时间,单位是毫秒
+      minEvictableIdleTimeMillis: 300000
+      validationQuery: SELECT 1 FROM DUAL
+      testWhileIdle: true
+      testOnBorrow: false
+      testOnReturn: false
+      # 打开PSCache,并且指定每个连接上PSCache的大小
+      poolPreparedStatements: true
+      maxPoolPreparedStatementPerConnectionSize: 20
+      # 配置监控统计拦截的filters,去掉后监控界面sql无法统计,'wall'用于防火墙
+      filters.commons-log.connection-logger-name: wall,log4j
+      filter:
+        stat:
+          enabled: true
+          mergeSql: true
+          log-slow-sql: true
+          slow-sql-millis: 2000
+      #监控配置
+      web-stat-filter:
+        enabled: true
+        url-pattern: /*
+        exclusions: '*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*'
+
+      # StatViewServlet配置,说明请参考Druid Wiki,配置_StatViewServlet配置
+      stat-view-servlet:
+        enabled: true
+        url-pattern: /druid/*
+        reset-enable: false
+        login-username: root
+        login-password: root
+
+  #mq
+  rabbitmq:
+    host: 192.168.2.121
+    port: 5672
+    username: lantone
+    password: lantone
+    publisher-confirms: true
+    virtual-host: /
+
+#mybatis
+mybatis-plus:
+  mapper-locations: classpath:/mapper/*Mapper.xml
+  #实体扫描,多个package用逗号或者分号分隔
+  typeAliasesPackage: com.diagbot.entity
+  global-config:
+    #刷新mapper 调试神器
+    db-config:
+      #主键类型  0:"数据库ID自增", 1:"用户输入ID",2:"全局唯一ID (数字类型唯一ID)", 3:"全局唯一ID UUID";
+      id-type: id_worker
+      #字段策略 0:"忽略判断",1:"非 NULL 判断"),2:"非空判断"
+      field-strategy: not_empty
+      #驼峰下划线转换
+      column-underline: true
+      #数据库大写下划线转换
+      #capital-mode: true
+      #刷新mapper 调试神器
+      refresh-mapper: true
+      #逻辑删除配置
+      logic-delete-value: 0
+      logic-not-delete-value: 1
+      #自定义填充策略接口实现
+      #meta-object-handler: com.baomidou.springboot.xxx
+      #自定义SQL注入器
+      #sql-injector: com.baomidou.springboot.xxx
+  configuration:
+    map-underscore-to-camel-case: true
+    cache-enabled: false
+
+server:
+  port: 9999

+ 2 - 2
config-server/src/main/resources/shared/uaa-service-pro.yml

@@ -4,7 +4,7 @@ spring:
     druid:
       driver-class-name: com.mysql.cj.jdbc.Driver
       platform: mysql
-      url: jdbc:mysql://192.168.2.236:3306/sys-user?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      url: jdbc:mysql://192.168.2.122:3306/sys-user?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
       username: root
       password: lantone
       # 连接池的配置信息
@@ -49,7 +49,7 @@ spring:
 
   #mq
   rabbitmq:
-    host: 192.168.2.236
+    host: 192.168.2.122
     port: 5672
     username: lantone
     password: lantone

+ 117 - 0
config-server/src/main/resources/shared/user-service-pre.yml

@@ -0,0 +1,117 @@
+server:
+  port: 8762
+
+# 驱动配置信息
+spring:
+  datasource:
+    druid:
+      driver-class-name: com.mysql.cj.jdbc.Driver
+      platform: mysql
+      url: jdbc:mysql://192.168.2.121:3306/sys-user?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      username: teamback
+      password: goTulmLeon
+      # 连接池的配置信息
+      # 初始化大小,最小,最大
+      initialSize: 5
+      minIdle: 5
+      maxActive: 20
+      # 配置获取连接等待超时的时间
+      maxWait: 60000
+      # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
+      timeBetweenEvictionRunsMillis: 60000
+      # 配置一个连接在池中最小生存的时间,单位是毫秒
+      minEvictableIdleTimeMillis: 300000
+      validationQuery: SELECT 1 FROM DUAL
+      testWhileIdle: true
+      testOnBorrow: false
+      testOnReturn: false
+      # 打开PSCache,并且指定每个连接上PSCache的大小
+      poolPreparedStatements: true
+      maxPoolPreparedStatementPerConnectionSize: 20
+      # 配置监控统计拦截的filters,去掉后监控界面sql无法统计,'wall'用于防火墙
+      filters.commons-log.connection-logger-name: wall,log4j
+      filter:
+        stat:
+          enabled: true
+          mergeSql: true
+          log-slow-sql: true
+          slow-sql-millis: 2000
+      #监控配置
+      web-stat-filter:
+        enabled: true
+        url-pattern: /*
+        exclusions: '*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*'
+
+      # StatViewServlet配置,说明请参考Druid Wiki,配置_StatViewServlet配置
+      stat-view-servlet:
+        enabled: true
+        url-pattern: /druid/*
+        reset-enable: false
+        login-username: root
+        login-password: root
+
+  cloud:
+    stream:
+      bindings:
+        outputLog:
+          destination: myLog
+  #          contentType: text/plain      # 实体 json string 在传递的类型装换 查看 http://docs.spring
+
+  #mq
+  rabbitmq:
+    host: 192.168.2.121
+    port: 5672
+    username: lantone
+    password: lantone
+    publisher-confirms: true
+    virtual-host: /
+
+  #redis
+  redis:
+    database:
+      cache: 0 # Redis缓存索引
+      idc: 1 # 不可见ID索引
+      sms: 2 # Redis短信索引
+      img: 3 # Redis图片验证码索引
+      token: 4 # Token索引
+    host: 192.168.2.121  #Redis服务器地址
+    port: 6379 # Redis服务器连接端口
+    password: lantone # Redis服务器连接密码(默认为空)
+    lettuce:
+      pool:
+        max-active: 8 # 连接池最大连接数(使用负值表示没有限制)
+        max-idle: 5 # 连接池中的最大空闲连接
+        max-wait: -1 # 连接池最大阻塞等待时间(使用负值表示没有限制)
+        min-idle: 0 # 连接池中的最小空闲连接
+    timeout: 20000 # 连接超时时间(毫秒)
+
+#mybatis
+mybatis-plus:
+  mapper-locations: classpath:/mapper/*Mapper.xml
+  #实体扫描,多个package用逗号或者分号分隔
+  typeAliasesPackage: com.diagbot.entity
+  global-config:
+    #刷新mapper 调试神器
+    db-config:
+      #主键类型  0:"数据库ID自增", 1:"用户输入ID",2:"全局唯一ID (数字类型唯一ID)", 3:"全局唯一ID UUID";
+      id-type: id_worker
+      #字段策略 0:"忽略判断",1:"非 NULL 判断"),2:"非空判断"
+      field-strategy: not_empty
+      #驼峰下划线转换
+      column-underline: true
+      #数据库大写下划线转换
+      #capital-mode: true
+      #刷新mapper 调试神器
+      refresh-mapper: true
+      #逻辑删除配置
+      logic-delete-value: 0
+      logic-not-delete-value: 1
+      #自定义填充策略接口实现
+      #meta-object-handler: com.baomidou.springboot.xxx
+      #自定义SQL注入器
+      #sql-injector: com.baomidou.springboot.xxx
+  configuration:
+    map-underscore-to-camel-case: true
+    cache-enabled: false
+
+foo: foo version 1
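The spring.redis.database map above (cache/idc/sms/img/token) is a project-specific convention for keeping each concern in its own Redis logical database rather than relying on a single index. A hedged sketch of wiring one of these indices into its own connection factory (the bean name and the choice of a dedicated factory are illustrative, not this code base's actual wiring):

import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisPassword;
import org.springframework.data.redis.connection.RedisStandaloneConfiguration;
import org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory;

// Illustrative wiring only; the real token store in this code base may differ.
@Configuration
public class TokenRedisConfig {

    @Value("${spring.redis.host}")
    private String host;

    @Value("${spring.redis.port}")
    private int port;

    @Value("${spring.redis.password}")
    private String password;

    @Value("${spring.redis.database.token}")
    private int tokenDatabase; // 4 in the pre profile above

    @Bean
    public LettuceConnectionFactory tokenRedisConnectionFactory() {
        RedisStandaloneConfiguration standalone = new RedisStandaloneConfiguration(host, port);
        standalone.setPassword(RedisPassword.of(password));
        standalone.setDatabase(tokenDatabase);
        return new LettuceConnectionFactory(standalone);
    }
}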

+ 3 - 3
config-server/src/main/resources/shared/user-service-pro.yml

@@ -7,7 +7,7 @@ spring:
     druid:
       driver-class-name: com.mysql.cj.jdbc.Driver
       platform: mysql
-      url: jdbc:mysql://192.168.2.236:3306/sys-user?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
+      url: jdbc:mysql://192.168.2.122:3306/sys-user?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf8&characterSetResults=utf8&useSSL=false
       username: root
       password: lantone
       # 连接池的配置信息
@@ -59,7 +59,7 @@ spring:
 
   #mq
   rabbitmq:
-    host: 192.168.2.236
+    host: 192.168.2.122
     port: 5672
     username: lantone
     password: lantone
@@ -74,7 +74,7 @@ spring:
       sms: 2 # Redis短信索引
       img: 3 # Redis图片验证码索引
       token: 4 # Token索引
-    host: 192.168.2.236  #Redis服务器地址
+    host: 192.168.2.122  #Redis服务器地址
     port: 6379 # Redis服务器连接端口
     password: lantone # Redis服务器连接密码(默认为空)
     lettuce:

+ 31 - 5
data-service/src/main/resources/logback-spring.xml

@@ -15,7 +15,7 @@
                     converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
     <!-- 彩色日志格式 -->
     <!--<property name="CONSOLE_LOG_PATTERN"-->
-              <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
+    <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
     <!--包名输出缩进对齐-->
     <property name="CONSOLE_LOG_PATTERN"
               value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
@@ -229,6 +229,22 @@
         </encoder>
     </appender>
 
+    <appender name="LOGSTASHPRE" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.121:5044</destination>
+        <!-- encoder必须配置,有多种可选 -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"data-service"}</customFields>
+        </encoder>
+    </appender>
+
+    <appender name="LOGSTASHPRO" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.122:5044</destination>
+        <!-- encoder必须配置,有多种可选 -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"data-service"}</customFields>
+        </encoder>
+    </appender>
+
     <!-- 本地环境下的日志配置 -->
     <springProfile name="local">
         <root level="INFO">
@@ -236,7 +252,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
         </root>
     </springProfile>
@@ -248,7 +263,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHDEV"/>
         </root>
@@ -261,12 +275,23 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHTEST"/>
         </root>
     </springProfile>
 
+    <!-- 预发布环境下的日志配置 -->
+    <springProfile name="pre">
+        <root level="INFO">
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRE"/>
+        </root>
+    </springProfile>
+
     <!-- 生产环境下的日志配置 -->
     <springProfile name="pro">
         <root level="INFO">
@@ -274,7 +299,8 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <appender-ref ref="LOGSTASH"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRO"/>
         </root>
     </springProfile>
 </configuration>
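The new <springProfile name="pre"> block only attaches LOGSTASHPRE when the application runs with the pre Spring profile active. A minimal illustration of selecting that profile programmatically (the class below is a stand-in, not this module's real entry point, which would normally just be started with --spring.profiles.active=pre):

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

// Stand-in application class used only to show profile selection.
@SpringBootApplication
public class PreProfileDemoApplication {
    public static void main(String[] args) {
        SpringApplication app = new SpringApplication(PreProfileDemoApplication.class);
        app.setAdditionalProfiles("pre"); // activates the "pre" profile, enabling LOGSTASHPRE above
        app.run(args);
    }
}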

+ 31 - 5
diagbotman-service/src/main/resources/logback-spring.xml

@@ -15,7 +15,7 @@
                     converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
     <!-- 彩色日志格式 -->
     <!--<property name="CONSOLE_LOG_PATTERN"-->
-              <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
+    <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
     <!--包名输出缩进对齐-->
     <property name="CONSOLE_LOG_PATTERN"
               value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
@@ -229,6 +229,22 @@
         </encoder>
     </appender>
 
+    <appender name="LOGSTASHPRE" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.121:5044</destination>
+        <!-- encoder必须配置,有多种可选 -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"diagbotman-service"}</customFields>
+        </encoder>
+    </appender>
+
+    <appender name="LOGSTASHPRO" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.122:5044</destination>
+        <!-- encoder必须配置,有多种可选 -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"diagbotman-service"}</customFields>
+        </encoder>
+    </appender>
+
     <!-- 本地环境下的日志配置 -->
     <springProfile name="local">
         <root level="INFO">
@@ -236,7 +252,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
         </root>
     </springProfile>
@@ -248,7 +263,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHDEV"/>
         </root>
@@ -261,12 +275,23 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHTEST"/>
         </root>
     </springProfile>
 
+    <!-- 预发布环境下的日志配置 -->
+    <springProfile name="pre">
+        <root level="INFO">
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRE"/>
+        </root>
+    </springProfile>
+
     <!-- 生产环境下的日志配置 -->
     <springProfile name="pro">
         <root level="INFO">
@@ -274,7 +299,8 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <appender-ref ref="LOGSTASH"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRO"/>
         </root>
     </springProfile>
 </configuration>

+ 8 - 0
eureka-server/src/main/resources/application-pre.yml

@@ -0,0 +1,8 @@
+server:
+  port: 8761
+eureka:
+  client:
+    register-with-eureka: false
+    fetch-registry: false
+    serviceUrl:
+      defaultZone: http://eureka1:${server.port}/eureka/

+ 31 - 5
eureka-server/src/main/resources/logback-spring.xml

@@ -15,7 +15,7 @@
                     converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
     <!-- 彩色日志格式 -->
     <!--<property name="CONSOLE_LOG_PATTERN"-->
-              <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
+    <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
     <!--包名输出缩进对齐-->
     <property name="CONSOLE_LOG_PATTERN"
               value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
@@ -229,6 +229,22 @@
         </encoder>
     </appender>
 
+    <appender name="LOGSTASHPRE" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.121:5044</destination>
+        <!-- encoder必须配置,有多种可选 -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"eureka-server"}</customFields>
+        </encoder>
+    </appender>
+
+    <appender name="LOGSTASHPRO" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.122:5044</destination>
+        <!-- encoder必须配置,有多种可选 -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"eureka-server"}</customFields>
+        </encoder>
+    </appender>
+
     <!-- 本地环境下的日志配置 -->
     <springProfile name="local">
         <root level="INFO">
@@ -236,7 +252,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
         </root>
     </springProfile>
@@ -248,7 +263,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHDEV"/>
         </root>
@@ -261,12 +275,23 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHTEST"/>
         </root>
     </springProfile>
 
+    <!-- 预发布环境下的日志配置 -->
+    <springProfile name="pre">
+        <root level="INFO">
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRE"/>
+        </root>
+    </springProfile>
+
     <!-- 生产环境下的日志配置 -->
     <springProfile name="pro">
         <root level="INFO">
@@ -274,7 +299,8 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <appender-ref ref="LOGSTASH"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRO"/>
         </root>
     </springProfile>
 </configuration>

+ 31 - 5
feedback-service/src/main/resources/logback-spring.xml

@@ -15,7 +15,7 @@
                     converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
     <!-- 彩色日志格式 -->
     <!--<property name="CONSOLE_LOG_PATTERN"-->
-              <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
+    <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
     <!--包名输出缩进对齐-->
     <property name="CONSOLE_LOG_PATTERN"
               value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
@@ -229,6 +229,22 @@
         </encoder>
     </appender>
 
+    <appender name="LOGSTASHPRE" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.121:5044</destination>
+        <!-- encoder必须配置,有多种可选 -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"feedback-service"}</customFields>
+        </encoder>
+    </appender>
+
+    <appender name="LOGSTASHPRO" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.122:5044</destination>
+        <!-- encoder必须配置,有多种可选 -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"feedback-service"}</customFields>
+        </encoder>
+    </appender>
+
     <!-- 本地环境下的日志配置 -->
     <springProfile name="local">
         <root level="INFO">
@@ -236,7 +252,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
         </root>
     </springProfile>
@@ -248,7 +263,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHDEV"/>
         </root>
@@ -261,12 +275,23 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHTEST"/>
         </root>
     </springProfile>
 
+    <!-- 预发布环境下的日志配置 -->
+    <springProfile name="pre">
+        <root level="INFO">
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRE"/>
+        </root>
+    </springProfile>
+
     <!-- 生产环境下的日志配置 -->
     <springProfile name="pro">
         <root level="INFO">
@@ -274,7 +299,8 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <appender-ref ref="LOGSTASH"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRO"/>
         </root>
     </springProfile>
 </configuration>

+ 100 - 57
gateway-service/src/main/resources/logback-spring.xml

@@ -1,19 +1,24 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <configuration>
     <!-- 项目名称 -->
-    <property name="APPDIR" value="gateway-service" />
+    <property name="APPDIR" value="gateway-service"/>
     <!--定义日志文件的存储地址 勿在 LogBack 的配置中使用相对路径-->
-    <property name="LOG_PATH" value="../logs" />
+    <property name="LOG_PATH" value="../logs"/>
 
     <!-- 彩色日志 -->
     <!-- 彩色日志依赖的渲染类 -->
-    <conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
-    <conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
-    <conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
+    <conversionRule conversionWord="clr"
+                    converterClass="org.springframework.boot.logging.logback.ColorConverter"/>
+    <conversionRule conversionWord="wex"
+                    converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter"/>
+    <conversionRule conversionWord="wEx"
+                    converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
     <!-- 彩色日志格式 -->
-    <!--<property name="CONSOLE_LOG_PATTERN" value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}" />-->
+    <!--<property name="CONSOLE_LOG_PATTERN"-->
+    <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
     <!--包名输出缩进对齐-->
-    <property name="CONSOLE_LOG_PATTERN" value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}" />
+    <property name="CONSOLE_LOG_PATTERN"
+              value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
 
     <!--  日志记录器,日期滚动记录
             ERROR 级别
@@ -26,12 +31,14 @@
             <!-- 归档的日志文件的路径,例如今天是1992-11-06日志,当前写的日志文件路径为file节点指定,
             可以将此文件与file指定文件路径设置为不同路径,从而将当前日志文件或归档日志文件置不同的目录。
             而1992-11-06的日志文件在由fileNamePattern指定。%d{yyyy-MM-dd}指定日期格式,%i指定索引 -->
-            <fileNamePattern>${LOG_PATH}/${APPDIR}/error/${APPDIR}-error-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
+            <fileNamePattern>${LOG_PATH}/${APPDIR}/error/${APPDIR}-error-%d{yyyy-MM-dd}.%i.log
+            </fileNamePattern>
             <!--  保留日志天数 -->
             <maxHistory>30</maxHistory>
             <!-- 除按日志记录之外,还配置了日志文件不能超过10MB,若超过10MB,日志文件会以索引0开始,
             命名日志文件,例如log-error-1992-11-06.0.log -->
-            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+            <timeBasedFileNamingAndTriggeringPolicy
+                    class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                 <maxFileSize>10MB</maxFileSize>
             </timeBasedFileNamingAndTriggeringPolicy>
         </rollingPolicy>
@@ -39,7 +46,8 @@
         <append>true</append>
         <!-- 日志文件的格式 -->
         <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
-            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level --- [%thread] %logger Line:%-3L - %msg%n</pattern>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level --- [%thread] %logger Line:%-3L - %msg%n
+            </pattern>
             <charset>utf-8</charset>
         </encoder>
         <!-- This log file records ERROR level entries -->
@@ -62,12 +70,14 @@
             <!-- Path of the archived log files. For example, for the logs of 1992-11-06, the file currently
             being written is the one set by the file node; it may point to a different path, so the current
             log file and the archived files can sit in different directories. The 1992-11-06 file itself is
             named by fileNamePattern: %d{yyyy-MM-dd} gives the date format and %i the index -->
-            <fileNamePattern>${LOG_PATH}/${APPDIR}/warn/${APPDIR}-warn-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
+            <fileNamePattern>${LOG_PATH}/${APPDIR}/warn/${APPDIR}-warn-%d{yyyy-MM-dd}.%i.log
+            </fileNamePattern>
             <!--  Number of days of log history to keep -->
             <maxHistory>15</maxHistory>
             <!-- In addition to the daily roll, a single log file may not exceed 10MB; once it does, new
             files are indexed starting from 0, e.g. log-warn-1992-11-06.0.log -->
-            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+            <timeBasedFileNamingAndTriggeringPolicy
+                    class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                 <maxFileSize>10MB</maxFileSize>
             </timeBasedFileNamingAndTriggeringPolicy>
         </rollingPolicy>
@@ -75,7 +85,8 @@
         <append>true</append>
         <!-- Log file line format -->
         <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
-            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level --- [%thread] %logger Line:%-3L - %msg%n</pattern>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level --- [%thread] %logger Line:%-3L - %msg%n
+            </pattern>
             <charset>utf-8</charset>
         </encoder>
         <!-- This log file records only WARN level entries -->
@@ -98,12 +109,14 @@
             <!-- Path of the archived log files. For example, for the logs of 1992-11-06, the file currently
             being written is the one set by the file node; it may point to a different path, so the current
             log file and the archived files can sit in different directories. The 1992-11-06 file itself is
             named by fileNamePattern: %d{yyyy-MM-dd} gives the date format and %i the index -->
-            <fileNamePattern>${LOG_PATH}/${APPDIR}/info/${APPDIR}-info-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
+            <fileNamePattern>${LOG_PATH}/${APPDIR}/info/${APPDIR}-info-%d{yyyy-MM-dd}.%i.log
+            </fileNamePattern>
             <!--  Number of days of log history to keep -->
             <maxHistory>15</maxHistory>
             <!-- In addition to the daily roll, a single log file may not exceed 10MB; once it does, new
             files are indexed starting from 0, e.g. log-info-1992-11-06.0.log -->
-            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+            <timeBasedFileNamingAndTriggeringPolicy
+                    class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                 <maxFileSize>10MB</maxFileSize>
             </timeBasedFileNamingAndTriggeringPolicy>
         </rollingPolicy>
@@ -111,7 +124,8 @@
         <append>true</append>
         <!-- Log file line format -->
         <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
-            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level --- [%thread] %logger Line:%-3L - %msg%n</pattern>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level --- [%thread] %logger Line:%-3L - %msg%n
+            </pattern>
             <charset>utf-8</charset>
         </encoder>
         <!-- This log file records only INFO level entries -->
@@ -134,12 +148,14 @@
             <!-- Path of the archived log files. For example, for the logs of 1992-11-06, the file currently
             being written is the one set by the file node; it may point to a different path, so the current
             log file and the archived files can sit in different directories. The 1992-11-06 file itself is
             named by fileNamePattern: %d{yyyy-MM-dd} gives the date format and %i the index -->
-            <fileNamePattern>${LOG_PATH}/${APPDIR}/debug/${APPDIR}-debug-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
+            <fileNamePattern>${LOG_PATH}/${APPDIR}/debug/${APPDIR}-debug-%d{yyyy-MM-dd}.%i.log
+            </fileNamePattern>
             <!--  Number of days of log history to keep -->
             <maxHistory>15</maxHistory>
             <!-- In addition to the daily roll, a single log file may not exceed 10MB; once it does, new
             files are indexed starting from 0, e.g. log-debug-1992-11-06.0.log -->
-            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+            <timeBasedFileNamingAndTriggeringPolicy
+                    class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                 <maxFileSize>10MB</maxFileSize>
             </timeBasedFileNamingAndTriggeringPolicy>
         </rollingPolicy>
@@ -147,7 +163,8 @@
         <append>true</append>
         <!-- Log file line format -->
         <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
-            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level --- [%thread] %logger Line:%-3L - %msg%n</pattern>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level --- [%thread] %logger Line:%-3L - %msg%n
+            </pattern>
             <charset>utf-8</charset>
         </encoder>
         <!-- This log file records only DEBUG level entries -->
@@ -175,31 +192,31 @@
 
     <!--&lt;!&ndash;Appender configuration for writing logs to a MySQL database&ndash;&gt;-->
     <!--<appender name="db" class="ch.qos.logback.classic.db.DBAppender">-->
-        <!--<connectionSource-->
-                <!--class="ch.qos.logback.core.db.DriverManagerConnectionSource">-->
-            <!--<driverClass>com.mysql.cj.jdbc.Driver</driverClass>-->
-            <!--<url>jdbc:mysql://120.77.222.42:3306/logback_member?characterEncoding=utf8</url>-->
-            <!--<user>root</user>-->
-            <!--<password>a123456789</password>-->
-        <!--</connectionSource>-->
+    <!--<connectionSource-->
+    <!--class="ch.qos.logback.core.db.DriverManagerConnectionSource">-->
+    <!--<driverClass>com.mysql.cj.jdbc.Driver</driverClass>-->
+    <!--<url>jdbc:mysql://120.77.222.42:3306/logback_member?characterEncoding=utf8</url>-->
+    <!--<user>root</user>-->
+    <!--<password>a123456789</password>-->
+    <!--</connectionSource>-->
     <!--</appender>-->
 
     <!-- FrameworkServlet logging -->
-    <logger name="org.springframework" level="WARN" />
+    <logger name="org.springframework" level="WARN"/>
 
     <!-- MyBatis log output -->
-    <logger name="org.apache.ibatis" level="DEBUG" />
-    <logger name="java.sql" level="DEBUG" />
+    <logger name="org.apache.ibatis" level="DEBUG"/>
+    <logger name="java.sql" level="DEBUG"/>
 
     <!--  Project mapper package path;
             SQL statements are printed to the console when STDOUT.filter.level is set to debug
     -->
-    <logger name="com.diagbot.mapper" level="DEBUG" />
+    <logger name="com.diagbot.mapper" level="DEBUG"/>
 
     <appender name="LOGSTASHDEV" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
         <destination>192.168.2.236:5044</destination>
         <!-- An encoder must be configured; several implementations are available -->
-        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder" >
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
             <customFields>{"appname":"gateway-service"}</customFields>
         </encoder>
     </appender>
@@ -207,7 +224,23 @@
     <appender name="LOGSTASHTEST" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
         <destination>192.168.2.241:5044</destination>
         <!-- An encoder must be configured; several implementations are available -->
-        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder" >
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"gateway-service"}</customFields>
+        </encoder>
+    </appender>
+
+    <appender name="LOGSTASHPRE" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.121:5044</destination>
+        <!-- An encoder must be configured; several implementations are available -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"gateway-service"}</customFields>
+        </encoder>
+    </appender>
+
+    <appender name="LOGSTASHPRO" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.122:5044</destination>
+        <!-- An encoder must be configured; several implementations are available -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
             <customFields>{"appname":"gateway-service"}</customFields>
         </encoder>
     </appender>
@@ -215,49 +248,59 @@
     <!-- Logging configuration for the local environment -->
     <springProfile name="local">
         <root level="INFO">
-            <appender-ref ref="ERROR" />
-            <appender-ref ref="WARN" />
-            <appender-ref ref="INFO" />
-            <appender-ref ref="DEBUG" />
-            <!-- Remove stdout for the production environment -->
-            <appender-ref ref="STDOUT" />
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
         </root>
     </springProfile>
 
     <!-- Logging configuration for the dev environment -->
     <springProfile name="dev">
         <root level="INFO">
-            <appender-ref ref="ERROR" />
-            <appender-ref ref="WARN" />
-            <appender-ref ref="INFO" />
-            <appender-ref ref="DEBUG" />
-            <!-- Remove stdout for the production environment -->
-            <appender-ref ref="STDOUT" />
-            <appender-ref ref="LOGSTASHDEV" />
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHDEV"/>
         </root>
     </springProfile>
 
     <!-- Logging configuration for the test environment -->
     <springProfile name="test">
         <root level="INFO">
-            <appender-ref ref="ERROR" />
-            <appender-ref ref="WARN" />
-            <appender-ref ref="INFO" />
-            <appender-ref ref="DEBUG" />
-            <!-- Remove stdout for the production environment -->
-            <appender-ref ref="STDOUT" />
-            <appender-ref ref="LOGSTASHTEST" />
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHTEST"/>
+        </root>
+    </springProfile>
+
+    <!-- Logging configuration for the pre-release environment -->
+    <springProfile name="pre">
+        <root level="INFO">
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRE"/>
         </root>
     </springProfile>
 
     <!-- Logging configuration for the production environment -->
     <springProfile name="pro">
         <root level="INFO">
-            <appender-ref ref="ERROR" />
-            <appender-ref ref="WARN" />
-            <appender-ref ref="INFO" />
-            <appender-ref ref="DEBUG" />
-            <appender-ref ref="LOGSTASH" />
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRO"/>
         </root>
     </springProfile>
 </configuration>
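
The change above, and the matching edits to the other logback-spring.xml files further down, all follow one pattern: two additional LogstashTcpSocketAppender destinations are declared (192.168.2.121:5044 for the pre-release environment, 192.168.2.122:5044 for production), and new <springProfile> blocks decide which of them the root logger ships JSON events to, so the Logstash target simply follows the active Spring profile. A minimal sketch of how that profile is selected at startup; the class name and the hard-coded property are illustrative only, not part of this repository:

    // Sketch only: the <springProfile name="pre"> block above attaches ERROR/WARN/INFO/DEBUG,
    // STDOUT and LOGSTASHPRE (192.168.2.121:5044) to the root logger; "pro" selects LOGSTASHPRO.
    import org.springframework.boot.SpringApplication;
    import org.springframework.boot.autoconfigure.SpringBootApplication;

    @SpringBootApplication
    public class ProfileBootstrapSketch {
        public static void main(String[] args) {
            System.setProperty("spring.profiles.active", "pre");   // usually set via launch args or env
            SpringApplication.run(ProfileBootstrapSketch.class, args);
        }
    }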

+ 32 - 6
icss-service/src/main/resources/logback-spring.xml

@@ -15,8 +15,8 @@
                     converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
     <!-- Colored log pattern -->
     <!--<property name="CONSOLE_LOG_PATTERN"-->
-              <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
-    Logger/package names padded for aligned output
+    <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
+    <!-- Logger/package names padded for aligned output -->
     <property name="CONSOLE_LOG_PATTERN"
               value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
 
@@ -229,6 +229,22 @@
         </encoder>
     </appender>
 
+    <appender name="LOGSTASHPRE" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.121:5044</destination>
+        <!-- An encoder must be configured; several implementations are available -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"icss-service"}</customFields>
+        </encoder>
+    </appender>
+
+    <appender name="LOGSTASHPRO" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.122:5044</destination>
+        <!-- An encoder must be configured; several implementations are available -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"icss-service"}</customFields>
+        </encoder>
+    </appender>
+
     <!-- Logging configuration for the local environment -->
     <springProfile name="local">
         <root level="INFO">
@@ -236,7 +252,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- Remove stdout for the production environment -->
             <appender-ref ref="STDOUT"/>
         </root>
     </springProfile>
@@ -248,7 +263,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- Remove stdout for the production environment -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHDEV"/>
         </root>
@@ -261,12 +275,23 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- Remove stdout for the production environment -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHTEST"/>
         </root>
     </springProfile>
 
+    <!-- Logging configuration for the pre-release environment -->
+    <springProfile name="pre">
+        <root level="INFO">
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRE"/>
+        </root>
+    </springProfile>
+
     <!-- Logging configuration for the production environment -->
     <springProfile name="pro">
         <root level="INFO">
@@ -274,7 +299,8 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <appender-ref ref="LOGSTASH"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRO"/>
         </root>
     </springProfile>
 </configuration>

+ 31 - 5
icssman-service/src/main/resources/logback-spring.xml

@@ -15,7 +15,7 @@
                     converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
     <!-- Colored log pattern -->
     <!--<property name="CONSOLE_LOG_PATTERN"-->
-              <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
+    <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
     <!-- Logger/package names padded for aligned output -->
     <property name="CONSOLE_LOG_PATTERN"
               value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
@@ -229,6 +229,22 @@
         </encoder>
     </appender>
 
+    <appender name="LOGSTASHPRE" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.121:5044</destination>
+        <!-- An encoder must be configured; several implementations are available -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"icssman-service"}</customFields>
+        </encoder>
+    </appender>
+
+    <appender name="LOGSTASHPRO" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.122:5044</destination>
+        <!-- An encoder must be configured; several implementations are available -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"icssman-service"}</customFields>
+        </encoder>
+    </appender>
+
     <!-- Logging configuration for the local environment -->
     <springProfile name="local">
         <root level="INFO">
@@ -236,7 +252,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- Remove stdout for the production environment -->
             <appender-ref ref="STDOUT"/>
         </root>
     </springProfile>
@@ -248,7 +263,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- Remove stdout for the production environment -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHDEV"/>
         </root>
@@ -261,12 +275,23 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- Remove stdout for the production environment -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHTEST"/>
         </root>
     </springProfile>
 
+    <!-- Logging configuration for the pre-release environment -->
+    <springProfile name="pre">
+        <root level="INFO">
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRE"/>
+        </root>
+    </springProfile>
+
     <!-- Logging configuration for the production environment -->
     <springProfile name="pro">
         <root level="INFO">
@@ -274,7 +299,8 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <appender-ref ref="LOGSTASH"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRO"/>
         </root>
     </springProfile>
 </configuration>

+ 5 - 5
knowledgeman-service/pom.xml

@@ -175,11 +175,6 @@
             <version>2.6.10</version>
         </dependency>
 
-        <dependency>
-            <groupId>net.oschina.zcx7878</groupId>
-            <artifactId>fastdfs-client-java</artifactId>
-            <version>1.27.0.0</version>
-        </dependency>
         <!-- Dependencies for file upload -->
 		<dependency>
 			<groupId>commons-fileupload</groupId>
@@ -202,6 +197,11 @@
             <groupId>org.apache.poi</groupId>
             <artifactId>poi-ooxml</artifactId>
         </dependency>
+
+        <dependency>
+            <groupId>com.squareup.okhttp3</groupId>
+            <artifactId>okhttp</artifactId>
+        </dependency>
 		
     </dependencies>
 

+ 0 - 161
knowledgeman-service/src/main/java/com/diagbot/client/fastdfs/FastDFSClient.java

@@ -1,161 +0,0 @@
-package com.diagbot.client.fastdfs;
-
-import org.csource.common.NameValuePair;
-import org.csource.fastdfs.ClientGlobal;
-import org.csource.fastdfs.FileInfo;
-import org.csource.fastdfs.ServerInfo;
-import org.csource.fastdfs.StorageClient;
-import org.csource.fastdfs.StorageServer;
-import org.csource.fastdfs.TrackerClient;
-import org.csource.fastdfs.TrackerServer;
-import org.slf4j.LoggerFactory;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-
-/**
- * @Description: FastDFS client
- * @author: gaodm
- * @time: 2018/11/13 13:55
- */
-public class FastDFSClient {
-    private static org.slf4j.Logger logger = LoggerFactory.getLogger(FastDFSClient.class);
-
-    public static String[] upload(FastDFSFile file) {
-        logger.info("File Name: " + file.getName() + "File Length:" + file.getContent().length);
-
-        // Set file metadata
-        NameValuePair[] meta_list = new NameValuePair[2];
-        meta_list[0] = new NameValuePair("name", file.getName());
-        meta_list[1] = new NameValuePair("ext", file.getExt());
-
-        long startTime = System.currentTimeMillis();
-        String[] uploadResults = null;
-        StorageClient storageClient = null;
-        try {
-            storageClient = getTrackerClient();
-            uploadResults = storageClient.upload_file(file.getContent(), file.getExt(), meta_list);
-        } catch (IOException e) {
-            logger.error("IO Exception when uploadind the file:" + file.getName(), e);
-        } catch (Exception e) {
-            logger.error("Non IO Exception when uploadind the file:" + file.getName(), e);
-        }
-        logger.info("upload_file time used:" + (System.currentTimeMillis() - startTime) + " ms");
-
-        if (uploadResults == null && storageClient != null) {
-            logger.error("upload file fail, error code:" + storageClient.getErrorCode());
-        }
-        String groupName = uploadResults[0];
-        String remoteFileName = uploadResults[1];
-
-        logger.info("upload file successfully!!!" + "group_name:" + groupName + ", remoteFileName:" + " " + remoteFileName);
-        return uploadResults;
-    }
-
-    public static FileInfo getFile(String groupName, String remoteFileName) {
-        try {
-            StorageClient storageClient = getTrackerClient();
-            return storageClient.get_file_info(groupName, remoteFileName);
-        } catch (IOException e) {
-            logger.error("IO Exception: Get File from Fast DFS failed", e);
-        } catch (Exception e) {
-            logger.error("Non IO Exception: Get File from Fast DFS failed", e);
-        }
-        return null;
-    }
-
-    public static InputStream downFile(String groupName, String remoteFileName) {
-        try {
-            StorageClient storageClient = getTrackerClient();
-            byte[] fileByte = storageClient.download_file(groupName, remoteFileName);
-            InputStream ins = new ByteArrayInputStream(fileByte);
-            return ins;
-        } catch (IOException e) {
-            logger.error("IO Exception: Get File from Fast DFS failed", e);
-        } catch (Exception e) {
-            logger.error("Non IO Exception: Get File from Fast DFS failed", e);
-        }
-        return null;
-    }
-
-    public static void deleteFile(String groupName, String remoteFileName)
-            throws Exception {
-        StorageClient storageClient = getTrackerClient();
-        int i = storageClient.delete_file(groupName, remoteFileName);
-        logger.info("delete file successfully!!!" + i);
-    }
-
-    public static StorageServer[] getStoreStorages(String groupName)
-            throws IOException {
-        TrackerClient trackerClient = new TrackerClient();
-        TrackerServer trackerServer = trackerClient.getConnection();
-        return trackerClient.getStoreStorages(trackerServer, groupName);
-    }
-
-    public static ServerInfo[] getFetchStorages(String groupName,
-                                                String remoteFileName) throws IOException {
-        TrackerClient trackerClient = new TrackerClient();
-        TrackerServer trackerServer = trackerClient.getConnection();
-        return trackerClient.getFetchStorages(trackerServer, groupName, remoteFileName);
-    }
-
-    public static String getTrackerUrl() throws IOException {
-        return "http://" + getTrackerServer().getInetSocketAddress().getHostString() + ":" + ClientGlobal.getG_tracker_http_port() + "/";
-    }
-
-    private static StorageClient getTrackerClient() throws IOException {
-        TrackerClient trackerClient = new TrackerClient();
-        TrackerServer trackerServer = trackerClient.getConnection();
-        StorageServer storageServer = getStorageServer(getStorageServerIp(trackerClient, trackerServer));
-        StorageClient storageClient
-                = new StorageClient(trackerServer, storageServer);
-        return storageClient;
-    }
-
-    private static TrackerServer getTrackerServer() throws IOException {
-        TrackerClient trackerClient = new TrackerClient();
-        TrackerServer trackerServer = trackerClient.getConnection();
-        return trackerServer;
-    }
-
-    /**
-     * Get an available storage server IP
-     *
-     * @param trackerClient
-     * @param trackerServer
-     * @return the storage IP
-     */
-    private static String getStorageServerIp(TrackerClient trackerClient, TrackerServer trackerServer) {
-        String storageIp = null;
-        if (trackerClient != null && trackerServer != null) {
-            try {
-                StorageServer storageServer = trackerClient.getStoreStorage(trackerServer, "group1");
-                storageIp = storageServer.getSocket().getInetAddress().getHostAddress();
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
-        }
-        logger.info("获取组中可用的storage IP:" + storageIp);
-        return storageIp;
-    }
-
-    /**
-     * Build the storage server
-     *
-     * @param storageIp
-     * @return the storage server
-     */
-    private static StorageServer getStorageServer(String storageIp) {
-        StorageServer storageServer = null;
-        if (storageIp != null && !("").equals(storageIp)) {
-            try {
-                storageServer = new StorageServer(storageIp, Integer.parseInt("23000"), Integer.parseInt("0"));
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
-        }
-        logger.info("storage server生成");
-        return storageServer;
-    }
-}

+ 0 - 70
knowledgeman-service/src/main/java/com/diagbot/client/fastdfs/FastDFSFile.java

@@ -1,70 +0,0 @@
-package com.diagbot.client.fastdfs;
-
-public class FastDFSFile {
-    private String name;
-
-    private byte[] content;
-
-    private String ext;
-
-    private String md5;
-
-    private String author;
-
-    public FastDFSFile(String name, byte[] content, String ext, String height,
-                       String width, String author) {
-        super();
-        this.name = name;
-        this.content = content;
-        this.ext = ext;
-        this.author = author;
-    }
-
-    public FastDFSFile(String name, byte[] content, String ext) {
-        super();
-        this.name = name;
-        this.content = content;
-        this.ext = ext;
-
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public void setName(String name) {
-        this.name = name;
-    }
-
-    public byte[] getContent() {
-        return content;
-    }
-
-    public void setContent(byte[] content) {
-        this.content = content;
-    }
-
-    public String getExt() {
-        return ext;
-    }
-
-    public void setExt(String ext) {
-        this.ext = ext;
-    }
-
-    public String getMd5() {
-        return md5;
-    }
-
-    public void setMd5(String md5) {
-        this.md5 = md5;
-    }
-
-    public String getAuthor() {
-        return author;
-    }
-
-    public void setAuthor(String author) {
-        this.author = author;
-    }
-}

+ 0 - 49
knowledgeman-service/src/main/java/com/diagbot/config/FastDFSConfigurer.java

@@ -1,49 +0,0 @@
-package com.diagbot.config;
-
-import lombok.extern.slf4j.Slf4j;
-import org.csource.fastdfs.ClientGlobal;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import java.util.Properties;
-
-/**
- * @Description:
- * @author: gaodm
- * @time: 2019/3/1 14:15
- */
-@Configuration
-@Slf4j
-public class FastDFSConfigurer {
-    @Value("${fastdfs.connect_timeout_in_seconds}")
-    private String connectTimeout;
-    @Value("${fastdfs.network_timeout_in_seconds}")
-    private String networkTimeout;
-    @Value("${fastdfs.charset}")
-    private String charset;
-    @Value("${fastdfs.http_tracker_http_port}")
-    private String httpTrackerHttpPort;
-    @Value("${fastdfs.http_anti_steal_token}")
-    private String httpAntiStealToken;
-    @Value("${fastdfs.tracker_servers}")
-    private String trackerServers;
-
-    @Bean
-    public Integer fastDFSInit(){
-        try {
-            Properties props = new Properties();
-            props.put(ClientGlobal.PROP_KEY_CONNECT_TIMEOUT_IN_SECONDS, connectTimeout);
-            props.put(ClientGlobal.PROP_KEY_NETWORK_TIMEOUT_IN_SECONDS, networkTimeout);
-            props.put(ClientGlobal.PROP_KEY_CHARSET, charset);
-            props.put(ClientGlobal.PROP_KEY_HTTP_TRACKER_HTTP_PORT, httpTrackerHttpPort);
-            props.put(ClientGlobal.PROP_KEY_HTTP_ANTI_STEAL_TOKEN, httpAntiStealToken);
-            props.put(ClientGlobal.PROP_KEY_TRACKER_SERVERS, trackerServers);
-            ClientGlobal.initByProperties(props);
-
-        } catch (Exception e) {
-            log.error("FastDFS Client Init Fail!", e);
-        }
-        return 1;
-    }
-}

+ 1 - 0
knowledgeman-service/src/main/java/com/diagbot/dto/FileDTO.java

@@ -15,6 +15,7 @@ public class FileDTO {
     private String original;
     private String title;
     private String url;
+    private String md5;
     private String info;
 
     public FileDTO(String state, String info) {

+ 17 - 0
knowledgeman-service/src/main/java/com/diagbot/dto/FileDeleteDTO.java

@@ -0,0 +1,17 @@
+package com.diagbot.dto;
+
+import lombok.Getter;
+import lombok.Setter;
+
+/**
+ * @Description:
+ * @author: gaodm
+ * @time: 2019/11/4 11:09
+ */
+@Getter
+@Setter
+public class FileDeleteDTO {
+    private Object data;
+    private String message;
+    private String status;
+}

+ 25 - 0
knowledgeman-service/src/main/java/com/diagbot/dto/FileUploadDTO.java

@@ -0,0 +1,25 @@
+package com.diagbot.dto;
+
+import lombok.Getter;
+import lombok.Setter;
+
+/**
+ * @Description:
+ * @author: gaodm
+ * @time: 2019/11/4 10:55
+ */
+@Getter
+@Setter
+public class FileUploadDTO {
+    private String url;
+    private String md5;
+    private String path;
+    private String domain;
+    private String scene;
+    private int size;
+    private int mtime;
+    private String scenes;
+    private String retmsg;
+    private int retcode;
+    private String src;
+}
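
FileDeleteDTO and FileUploadDTO mirror the JSON bodies returned by the go-fastdfs delete and upload endpoints (the upload request further down asks for output=json). A small sketch of the deserialization step, assuming the project's GsonUtil behaves like plain Gson; the sample payload values are made up for illustration:

    // Sketch: parsing an upload response of this shape into FileUploadDTO.
    import com.diagbot.dto.FileUploadDTO;
    import com.google.gson.Gson;

    public class UploadResponseParseSketch {
        public static void main(String[] args) {
            // Illustrative values only; the field names follow the DTO above.
            String json = "{\"url\":\"http://file-server/group1/demo.png\","
                    + "\"md5\":\"d41d8cd98f00b204e9800998ecf8427e\","
                    + "\"path\":\"/group1/demo.png\",\"scene\":\"M00\","
                    + "\"size\":1024,\"retcode\":0,\"retmsg\":\"\"}";
            FileUploadDTO dto = new Gson().fromJson(json, FileUploadDTO.class);
            // UploadServiceImpl keeps path and md5 from this object.
            System.out.println(dto.getPath() + " " + dto.getMd5());
        }
    }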

+ 3 - 1
knowledgeman-service/src/main/java/com/diagbot/facade/DiagnoseFacade.java

@@ -923,11 +923,13 @@ public class DiagnoseFacade extends DiagnoseServiceImpl {
      * @return
      */
     public void updateNeo(UpdateDiagnoseVO updateDiagnoseVO) {
+        Date date = new Date();
         // Update timestamps
         this.update(new UpdateWrapper<Diagnose>()
                 .eq("id", updateDiagnoseVO.getId())
                 .set("modifier", updateDiagnoseVO.getModifier())
-                .set("neo_update", new Date())
+                .set("neo_update", date)
+                .set("gmt_modified", date)
         );
 
         // Call the knowledge-graph update interface
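
Capturing Date date = new Date() once means neo_update and gmt_modified receive the identical instant; two separate new Date() calls could differ by a few milliseconds. A tiny illustration, with a placeholder entity type and id:

    // Sketch: one shared Date keeps both timestamp columns in step.
    import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
    import java.util.Date;

    public class SharedTimestampSketch {
        public static void main(String[] args) {
            Date first = new Date();
            Date second = new Date();              // not guaranteed to equal 'first'
            Date shared = new Date();              // captured once, as updateNeo() now does
            UpdateWrapper<Object> wrapper = new UpdateWrapper<>();
            wrapper.eq("id", 1L)
                   .set("neo_update", shared)
                   .set("gmt_modified", shared);
            System.out.println(first.equals(second));   // usually true, but not guaranteed
        }
    }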

+ 80 - 49
knowledgeman-service/src/main/java/com/diagbot/service/impl/UploadServiceImpl.java

@@ -1,16 +1,24 @@
 package com.diagbot.service.impl;
 
-import com.diagbot.client.fastdfs.FastDFSClient;
-import com.diagbot.client.fastdfs.FastDFSFile;
 import com.diagbot.dto.FileDTO;
+import com.diagbot.dto.FileDeleteDTO;
+import com.diagbot.dto.FileUploadDTO;
 import com.diagbot.service.UploadService;
+import com.diagbot.util.GsonUtil;
+import com.diagbot.util.StringUtil;
 import lombok.extern.slf4j.Slf4j;
+import okhttp3.FormBody;
+import okhttp3.MediaType;
+import okhttp3.MultipartBody;
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.RequestBody;
+import okhttp3.Response;
+import okhttp3.ResponseBody;
+import org.springframework.beans.factory.annotation.Value;
 import org.springframework.stereotype.Service;
 import org.springframework.web.multipart.MultipartFile;
 
-import java.io.IOException;
-import java.io.InputStream;
-
 /**
  * @Description: File upload service implementation
  * @author: gaodm
@@ -19,6 +27,9 @@ import java.io.InputStream;
 @Slf4j
 @Service
 public class UploadServiceImpl implements UploadService {
+    @Value("${imageUrl.prefix}")
+    private String imagerUrl;
+
     @Override
     public FileDTO singleFileUpload(MultipartFile file) {
         if (file.isEmpty()) {
@@ -28,71 +39,91 @@ public class UploadServiceImpl implements UploadService {
         if (file.getSize() > 1024 * 1024) {
             return new FileDTO("FAILURE", "文件上传失败,超出大小限制1MB");
         }
+
+        String result = "";
         try {
-            FileDTO fileDTO = saveFile(file);
-            return fileDTO;
-        } catch (Exception e) {
-            log.error("文件上传失败", e);
-            return new FileDTO("FAILURE", "文件上传失败,请重新上传");
-        }
-    }
+            OkHttpClient httpClient = new OkHttpClient();
+            MultipartBody multipartBody = new MultipartBody.Builder().
+                    setType(MultipartBody.FORM)
+                    .addFormDataPart("file", file.getOriginalFilename(),
+                            RequestBody.create(MediaType.parse("multipart/form-data;charset=utf-8"),
+                                    file.getBytes()))
+                    .addFormDataPart("scene", "M00")
+                    .addFormDataPart("output", "json")
+                    .build();
 
+            Request request = new Request.Builder()
+                    .url(imagerUrl + "/group1/upload")
+                    .post(multipartBody)
+                    .build();
 
-    /**
-     * @param multipartFile
-     * @return
-     * @throws IOException
-     */
-    public FileDTO saveFile(MultipartFile multipartFile) throws IOException {
+            Response response = httpClient.newCall(request).execute();
+            if (response.isSuccessful()) {
+                ResponseBody body = response.body();
+                if (body != null) {
+                    result = body.string();
+                    //System.out.println(result);
+                }
+            }
 
-        String[] fileAbsolutePath = {};
-        String fileName = multipartFile.getOriginalFilename();
-        String ext = fileName.substring(fileName.lastIndexOf(".") + 1);
-        byte[] file_buff = null;
-        InputStream inputStream = multipartFile.getInputStream();
-        if (inputStream != null) {
-            int len1 = inputStream.available();
-            file_buff = new byte[len1];
-            inputStream.read(file_buff);
-        }
-        inputStream.close();
-        FastDFSFile file = new FastDFSFile(fileName, file_buff, ext);
-        try {
-            fileAbsolutePath = FastDFSClient.upload(file);  //upload to fastdfs
+            if (StringUtil.isBlank(result)) {
+                return new FileDTO("FAILURE", "文件上传失败,请重新上传");
+            }
         } catch (Exception e) {
-            log.error("文件上传异常", e);
-            return new FileDTO("FAILURE", "文件上传异常");
-        }
-        if (fileAbsolutePath == null) {
-            log.error("文件上传失败,请重新上传");
+            log.error("文件上传失败", e);
             return new FileDTO("FAILURE", "文件上传失败,请重新上传");
         }
-        String path = "/" + fileAbsolutePath[0] + "/" + fileAbsolutePath[1];
+
+        FileUploadDTO fileUploadDTO = GsonUtil.toObject(result, FileUploadDTO.class);
         FileDTO fileDTO = new FileDTO("SUCCESS", "文件上传成功");
-        fileDTO.setUrl(path);
-        fileDTO.setOriginal(multipartFile.getOriginalFilename());
-        fileDTO.setTitle(multipartFile.getOriginalFilename());
+        fileDTO.setUrl(fileUploadDTO.getPath());
+        fileDTO.setMd5(fileUploadDTO.getMd5());
+        fileDTO.setOriginal(file.getOriginalFilename());
+        fileDTO.setTitle(file.getOriginalFilename());
         return fileDTO;
     }
 
     /**
      * Delete the file on the remote file server
      *
-     * @param path
+     * @param md5
      * @return
      */
-    public FileDTO deleteRemoteFile(String path) {
-        if (path.startsWith("/")) {
-            path = path.substring(1);
-        }
-        String fileName = path.substring(path.indexOf("/") + 1);
-        String groupName = path.substring(0, path.indexOf("/"));
+    public FileDTO deleteRemoteFile(String md5) {
+        String result = "";
         try {
-            FastDFSClient.deleteFile(groupName, fileName);
+            OkHttpClient httpClient = new OkHttpClient();
+            RequestBody formBody = new FormBody.Builder()
+                    .add("md5", md5)
+                    .build();
+
+            Request request = new Request.Builder()
+                    .url(imagerUrl + "/group1/delete")
+                    .post(formBody)
+                    .build();
+
+            Response response = httpClient.newCall(request).execute();
+            if (response.isSuccessful()) {
+                ResponseBody body = response.body();
+                if (body != null) {
+                    result = body.string();
+                    //System.out.println(result);
+                }
+            }
+
+            if (StringUtil.isBlank(result)) {
+                return new FileDTO("FAILURE", "文件删除失败");
+            }
+
         } catch (Exception e) {
             log.error("", e);
             return new FileDTO("FAILURE", "文件删除失败");
         }
+
+        FileDeleteDTO fileDeleteDTO = GsonUtil.toObject(result, FileDeleteDTO.class);
+        if (fileDeleteDTO.getStatus().equals("fail")) {
+            return new FileDTO("FAILURE", fileDeleteDTO.getMessage());
+        }
         return new FileDTO("SUCCESS", "文件删除成功");
     }
 }
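
The rewritten service uploads by POSTing the file as multipart/form-data to ${imageUrl.prefix}/group1/upload with scene=M00 and output=json, and deletes by POSTing the file's md5 to /group1/delete, so the md5 returned at upload time is the handle callers need to keep. One thing the new code leaves to finalization is closing the OkHttp Response; a hedged variant of the delete call using try-with-resources, with placeholder base URL and md5:

    // Sketch only: the same delete-by-md5 call, with the Response closed explicitly.
    import okhttp3.FormBody;
    import okhttp3.OkHttpClient;
    import okhttp3.Request;
    import okhttp3.RequestBody;
    import okhttp3.Response;

    public class DeleteByMd5Sketch {
        public static void main(String[] args) throws Exception {
            OkHttpClient client = new OkHttpClient();
            RequestBody form = new FormBody.Builder()
                    .add("md5", "d41d8cd98f00b204e9800998ecf8427e")   // placeholder md5
                    .build();
            Request request = new Request.Builder()
                    .url("http://file-server/group1/delete")          // placeholder for imageUrl.prefix
                    .post(form)
                    .build();
            try (Response response = client.newCall(request).execute()) {
                String body = (response.isSuccessful() && response.body() != null)
                        ? response.body().string() : "";
                System.out.println(body);   // go-fastdfs replies with a status/message JSON document
            }
        }
    }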

+ 2 - 8
knowledgeman-service/src/main/java/com/diagbot/web/UploadController.java

@@ -3,8 +3,6 @@ package com.diagbot.web;
 import com.diagbot.dto.FileDTO;
 import com.diagbot.dto.RespDTO;
 import com.diagbot.facade.UploadFacade;
-import com.diagbot.util.FastJsonUtils;
-import com.diagbot.util.StringUtil;
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiOperation;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -17,10 +15,6 @@ import org.springframework.web.bind.annotation.RequestParam;
 import org.springframework.web.bind.annotation.RestController;
 import org.springframework.web.multipart.MultipartFile;
 
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import java.io.File;
-
 @RestController
 @Api(value = "文件上传API", tags = { "知识库标准化-文件上传API" })
 @RequestMapping(value = "/file")
@@ -43,8 +37,8 @@ public class UploadController {
 
     @PostMapping("/deleteRemoteFile")
     @ApiOperation(value = "知识库标准化-文件删除")
-    public RespDTO<FileDTO> deleteRemoteFile(@RequestParam("path") String path) {
-        FileDTO data = uploadFacade.deleteRemoteFile(path);
+    public RespDTO<FileDTO> deleteRemoteFile(@RequestParam("md5") String md5) {
+        FileDTO data = uploadFacade.deleteRemoteFile(md5);
         if (data.getState().equals("FAILURE")) {
             return RespDTO.onError(data.getInfo());
         } else {
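
For API consumers this is a breaking change: POST /file/deleteRemoteFile now expects a form parameter named md5 (the value returned by the upload API) rather than path. A client-side sketch, using the JDK 11 HttpClient purely for illustration; host and md5 are placeholders:

    // Sketch: calling the changed endpoint with the new md5 parameter.
    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class DeleteRemoteFileClientSketch {
        public static void main(String[] args) throws Exception {
            HttpClient client = HttpClient.newHttpClient();
            HttpRequest request = HttpRequest.newBuilder(
                            URI.create("http://knowledgeman-host/file/deleteRemoteFile"))   // placeholder host
                    .header("Content-Type", "application/x-www-form-urlencoded")
                    .POST(HttpRequest.BodyPublishers.ofString("md5=d41d8cd98f00b204e9800998ecf8427e"))
                    .build();
            HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
            System.out.println(response.body());   // RespDTO JSON wrapping the FileDTO result
        }
    }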

+ 31 - 5
knowledgeman-service/src/main/resources/logback-spring.xml

@@ -15,7 +15,7 @@
                     converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
     <!-- Colored log pattern -->
     <!--<property name="CONSOLE_LOG_PATTERN"-->
-              <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
+    <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
     <!-- Logger/package names padded for aligned output -->
     <property name="CONSOLE_LOG_PATTERN"
               value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
@@ -229,6 +229,22 @@
         </encoder>
     </appender>
 
+    <appender name="LOGSTASHPRE" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.121:5044</destination>
+        <!-- An encoder must be configured; several implementations are available -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"knowledgeman-service"}</customFields>
+        </encoder>
+    </appender>
+
+    <appender name="LOGSTASHPRO" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.122:5044</destination>
+        <!-- An encoder must be configured; several implementations are available -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"knowledgeman-service"}</customFields>
+        </encoder>
+    </appender>
+
     <!-- Logging configuration for the local environment -->
     <springProfile name="local">
         <root level="INFO">
@@ -236,7 +252,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- Remove stdout for the production environment -->
             <appender-ref ref="STDOUT"/>
         </root>
     </springProfile>
@@ -248,7 +263,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- Remove stdout for the production environment -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHDEV"/>
         </root>
@@ -261,12 +275,23 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- Remove stdout for the production environment -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHTEST"/>
         </root>
     </springProfile>
 
+    <!-- Logging configuration for the pre-release environment -->
+    <springProfile name="pre">
+        <root level="INFO">
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRE"/>
+        </root>
+    </springProfile>
+
     <!-- Logging configuration for the production environment -->
     <springProfile name="pro">
         <root level="INFO">
@@ -274,7 +299,8 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <appender-ref ref="LOGSTASH"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRO"/>
         </root>
     </springProfile>
 </configuration>

+ 31 - 5
log-service/src/main/resources/logback-spring.xml

@@ -15,7 +15,7 @@
                     converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
     <!-- Colored log pattern -->
     <!--<property name="CONSOLE_LOG_PATTERN"-->
-              <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
+    <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
     <!-- Logger/package names padded for aligned output -->
     <property name="CONSOLE_LOG_PATTERN"
               value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
@@ -229,6 +229,22 @@
         </encoder>
     </appender>
 
+    <appender name="LOGSTASHPRE" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.121:5044</destination>
+        <!-- An encoder must be configured; several implementations are available -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"log-service"}</customFields>
+        </encoder>
+    </appender>
+
+    <appender name="LOGSTASHPRO" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.122:5044</destination>
+        <!-- An encoder must be configured; several implementations are available -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"log-service"}</customFields>
+        </encoder>
+    </appender>
+
     <!-- Logging configuration for the local environment -->
     <springProfile name="local">
         <root level="INFO">
@@ -236,7 +252,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- Remove stdout for the production environment -->
             <appender-ref ref="STDOUT"/>
         </root>
     </springProfile>
@@ -248,7 +263,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- Remove stdout for the production environment -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHDEV"/>
         </root>
@@ -261,12 +275,23 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- Remove stdout for the production environment -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHTEST"/>
         </root>
     </springProfile>
 
+    <!-- Logging configuration for the pre-release environment -->
+    <springProfile name="pre">
+        <root level="INFO">
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRE"/>
+        </root>
+    </springProfile>
+
     <!-- Logging configuration for the production environment -->
     <springProfile name="pro">
         <root level="INFO">
@@ -274,7 +299,8 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <appender-ref ref="LOGSTASH"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRO"/>
         </root>
     </springProfile>
 </configuration>

+ 31 - 5
ltapi-service/src/main/resources/logback-spring.xml

@@ -15,7 +15,7 @@
                     converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
     <!-- Colored log pattern -->
     <!--<property name="CONSOLE_LOG_PATTERN"-->
-              <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
+    <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
     <!-- Logger/package names padded for aligned output -->
     <property name="CONSOLE_LOG_PATTERN"
               value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
@@ -229,6 +229,22 @@
         </encoder>
     </appender>
 
+    <appender name="LOGSTASHPRE" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.121:5044</destination>
+        <!-- An encoder must be configured; several implementations are available -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"ltapi-service"}</customFields>
+        </encoder>
+    </appender>
+
+    <appender name="LOGSTASHPRO" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.122:5044</destination>
+        <!-- An encoder must be configured; several implementations are available -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"ltapi-service"}</customFields>
+        </encoder>
+    </appender>
+
     <!-- Logging configuration for the local environment -->
     <springProfile name="local">
         <root level="INFO">
@@ -236,7 +252,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- Remove stdout for the production environment -->
             <appender-ref ref="STDOUT"/>
         </root>
     </springProfile>
@@ -248,7 +263,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- Remove stdout for the production environment -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHDEV"/>
         </root>
@@ -261,12 +275,23 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- Remove stdout for the production environment -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHTEST"/>
         </root>
     </springProfile>
 
+    <!-- Logging configuration for the pre-release environment -->
+    <springProfile name="pre">
+        <root level="INFO">
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRE"/>
+        </root>
+    </springProfile>
+
     <!-- Logging configuration for the production environment -->
     <springProfile name="pro">
         <root level="INFO">
@@ -274,7 +299,8 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <appender-ref ref="LOGSTASH"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRO"/>
         </root>
     </springProfile>
 </configuration>

+ 31 - 5
monitor-service/src/main/resources/logback-spring.xml

@@ -15,7 +15,7 @@
                     converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
     <!-- Colored log pattern -->
     <!--<property name="CONSOLE_LOG_PATTERN"-->
-              <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
+    <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
     <!-- Logger/package names padded for aligned output -->
     <property name="CONSOLE_LOG_PATTERN"
               value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
@@ -229,6 +229,22 @@
         </encoder>
     </appender>
 
+    <appender name="LOGSTASHPRE" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.121:5044</destination>
+        <!-- An encoder must be configured; several implementations are available -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"monitor-service"}</customFields>
+        </encoder>
+    </appender>
+
+    <appender name="LOGSTASHPRO" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.122:5044</destination>
+        <!-- An encoder must be configured; several implementations are available -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"monitor-service"}</customFields>
+        </encoder>
+    </appender>
+
     <!-- Logging configuration for the local environment -->
     <springProfile name="local">
         <root level="INFO">
@@ -236,7 +252,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- Remove stdout for the production environment -->
             <appender-ref ref="STDOUT"/>
         </root>
     </springProfile>
@@ -248,7 +263,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- Remove stdout for the production environment -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHDEV"/>
         </root>
@@ -261,12 +275,23 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- Remove stdout for the production environment -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHTEST"/>
         </root>
     </springProfile>
 
+    <!-- Logging configuration for the pre-release environment -->
+    <springProfile name="pre">
+        <root level="INFO">
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRE"/>
+        </root>
+    </springProfile>
+
     <!-- Logging configuration for the production environment -->
     <springProfile name="pro">
         <root level="INFO">
@@ -274,7 +299,8 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <appender-ref ref="LOGSTASH"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRO"/>
         </root>
     </springProfile>
 </configuration>

+ 7 - 0
pom.xml

@@ -56,6 +56,7 @@
         <logstash.version>5.2</logstash.version>
         <poi.version>4.1.0</poi.version>
         <aggregator.version>1.1.0</aggregator.version>
+        <okhttp.version>4.2.2</okhttp.version>
         <docker-maven-plugin.version>1.1.1</docker-maven-plugin.version>
         <docker.image.prefix>192.168.2.236:5000/diagbotcloud</docker.image.prefix>
     </properties>
@@ -171,6 +172,12 @@
                 <artifactId>spring-boot-data-aggregator-starter</artifactId>
                 <version>${aggregator.version}</version>
             </dependency>
+
+            <dependency>
+                <groupId>com.squareup.okhttp3</groupId>
+                <artifactId>okhttp</artifactId>
+                <version>${okhttp.version}</version>
+            </dependency>
         </dependencies>
     </dependencyManagement>
 

+ 9 - 5
prec-service/pom.xml

@@ -154,11 +154,6 @@
             <artifactId>spring-cloud-starter-stream-rabbit</artifactId>
         </dependency>
 
-        <dependency>
-            <groupId>net.oschina.zcx7878</groupId>
-            <artifactId>fastdfs-client-java</artifactId>
-            <version>1.27.0.0</version>
-        </dependency>
         <!-- Dependencies for file upload -->
         <dependency>
             <groupId>commons-fileupload</groupId>
@@ -177,6 +172,15 @@
             <version>0.4.8</version>
         </dependency>
 
+        <dependency>
+            <groupId>io.github.lvyahui8</groupId>
+            <artifactId>spring-boot-data-aggregator-starter</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>com.squareup.okhttp3</groupId>
+            <artifactId>okhttp</artifactId>
+        </dependency>
     </dependencies>
 
     <build>

+ 103 - 0
prec-service/src/main/java/com/diagbot/aggregate/UploadAggregate.java

@@ -0,0 +1,103 @@
+package com.diagbot.aggregate;
+
+import com.diagbot.dto.FileDTO;
+import com.diagbot.dto.FileThumDTO;
+import com.diagbot.facade.UploadFacade;
+import com.diagbot.util.ListUtil;
+import io.github.lvyahui8.spring.annotation.DataConsumer;
+import io.github.lvyahui8.spring.annotation.DataProvider;
+import io.github.lvyahui8.spring.annotation.InvokeParameter;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+import org.springframework.web.multipart.MultipartFile;
+
+import java.util.List;
+
+/**
+ * @Description:
+ * @author: gaodm
+ * @time: 2019/11/1 9:15
+ */
+@Component
+public class UploadAggregate {
+    @Autowired
+    private UploadFacade uploadFacade;
+
+    @DataProvider("thumUploads")
+    public List<FileDTO> setAll(
+            @DataConsumer("thumUpload1") FileDTO fileDTO1,
+            @DataConsumer("thumUpload2") FileDTO fileDTO2,
+            @DataConsumer("thumUpload3") FileDTO fileDTO3,
+            @DataConsumer("thumUpload4") FileDTO fileDTO4,
+            @DataConsumer("thumUpload5") FileDTO fileDTO5,
+            @DataConsumer("thumUpload6") FileDTO fileDTO6) {
+        List<FileDTO> fileThumDTOList = ListUtil.newArrayList();
+        if (null != fileDTO1){
+            fileThumDTOList.add(fileDTO1);
+        }
+
+        if (null != fileDTO2){
+            fileThumDTOList.add(fileDTO2);
+        }
+
+        if (null != fileDTO3){
+            fileThumDTOList.add(fileDTO3);
+        }
+
+        if (null != fileDTO4){
+            fileThumDTOList.add(fileDTO4);
+        }
+
+        if (null != fileDTO5){
+            fileThumDTOList.add(fileDTO5);
+        }
+        if (null != fileDTO6){
+            fileThumDTOList.add(fileDTO6);
+        }
+
+        return fileThumDTOList;
+    }
+
+    @DataProvider("thumUpload1")
+    public FileDTO thumUpload1(@InvokeParameter("file1") MultipartFile file,
+                                   @InvokeParameter("type1") Integer type) {
+        return this.singleFileThumUpload(file, type);
+    }
+
+    @DataProvider("thumUpload2")
+    public FileDTO thumUpload2(@InvokeParameter("file2") MultipartFile file,
+                                   @InvokeParameter("type2") Integer type) {
+        return this.singleFileThumUpload(file, type);
+    }
+
+    @DataProvider("thumUpload3")
+    public FileDTO thumUpload3(@InvokeParameter("file3") MultipartFile file,
+                                   @InvokeParameter("type3") Integer type) {
+        return this.singleFileThumUpload(file, type);
+    }
+
+    @DataProvider("thumUpload4")
+    public FileDTO thumUpload4(@InvokeParameter("file4") MultipartFile file,
+                                   @InvokeParameter("type4") Integer type) {
+        return this.singleFileThumUpload(file, type);
+    }
+
+    @DataProvider("thumUpload5")
+    public FileDTO thumUpload5(@InvokeParameter("file5") MultipartFile file,
+                                   @InvokeParameter("type5") Integer type) {
+        return this.singleFileThumUpload(file, type);
+    }
+
+    @DataProvider("thumUpload6")
+    public FileDTO thumUpload6(@InvokeParameter("file6") MultipartFile file,
+                                   @InvokeParameter("type6") Integer type) {
+        return this.singleFileThumUpload(file, type);
+    }
+
+    private FileDTO singleFileThumUpload(MultipartFile file, Integer type) {
+        if (file == null) {
+            return null;
+        }
+        return uploadFacade.singleFileUpload(file, type);
+    }
+}
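
A minimal sketch of how the "thumUploads" provider above can be consumed, assuming the aggregator facade bean from spring-boot-data-aggregator is available (UploadServiceImpl later in this diff injects it the same way). Only the file1/type1 slot is supplied here; slots that are not supplied end up null in the corresponding thumUpload providers, which return null and are filtered out by setAll(), the same convention multiFileThumUpload relies on. The class name and values are illustrative, not part of this commit.

import com.diagbot.dto.FileDTO;
import io.github.lvyahui8.spring.aggregate.facade.DataBeanAggregateQueryFacade;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.web.multipart.MultipartFile;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Component
public class ThumUploadCallerSketch {
    @Autowired
    private DataBeanAggregateQueryFacade dataBeanAggregateQueryFacade;

    @SuppressWarnings("unchecked")
    public List<FileDTO> uploadSingle(MultipartFile file, Integer type) throws Exception {
        // Supply only the first file/type slot; the remaining slots stay unset.
        Map<String, Object> invokeParams = new HashMap<>();
        invokeParams.put("file1", file);
        invokeParams.put("type1", type);
        // "thumUploads" fans out to the thumUpload1..6 providers and collects their results.
        return dataBeanAggregateQueryFacade.get("thumUploads", invokeParams, List.class);
    }
}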

+ 0 - 161
prec-service/src/main/java/com/diagbot/client/fastdfs/FastDFSClient.java

@@ -1,161 +0,0 @@
-package com.diagbot.client.fastdfs;
-
-import org.csource.common.NameValuePair;
-import org.csource.fastdfs.ClientGlobal;
-import org.csource.fastdfs.FileInfo;
-import org.csource.fastdfs.ServerInfo;
-import org.csource.fastdfs.StorageClient;
-import org.csource.fastdfs.StorageServer;
-import org.csource.fastdfs.TrackerClient;
-import org.csource.fastdfs.TrackerServer;
-import org.slf4j.LoggerFactory;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-
-/**
- * @Description: FastDFS 客户端
- * @author: gaodm
- * @time: 2018/11/13 13:55
- */
-public class FastDFSClient {
-    private static org.slf4j.Logger logger = LoggerFactory.getLogger(FastDFSClient.class);
-
-    public static String[] upload(FastDFSFile file) {
-        logger.info("File Name: " + file.getName() + "File Length:" + file.getContent().length);
-
-        // 设置元信息
-        NameValuePair[] meta_list = new NameValuePair[2];
-        meta_list[0] = new NameValuePair("name", file.getName());
-        meta_list[1] = new NameValuePair("ext", file.getExt());
-
-        long startTime = System.currentTimeMillis();
-        String[] uploadResults = null;
-        StorageClient storageClient = null;
-        try {
-            storageClient = getTrackerClient();
-            uploadResults = storageClient.upload_file(file.getContent(), file.getExt(), meta_list);
-        } catch (IOException e) {
-            logger.error("IO Exception when uploadind the file:" + file.getName(), e);
-        } catch (Exception e) {
-            logger.error("Non IO Exception when uploadind the file:" + file.getName(), e);
-        }
-        logger.info("upload_file time used:" + (System.currentTimeMillis() - startTime) + " ms");
-
-        if (uploadResults == null && storageClient != null) {
-            logger.error("upload file fail, error code:" + storageClient.getErrorCode());
-        }
-        String groupName = uploadResults[0];
-        String remoteFileName = uploadResults[1];
-
-        logger.info("upload file successfully!!!" + "group_name:" + groupName + ", remoteFileName:" + " " + remoteFileName);
-        return uploadResults;
-    }
-
-    public static FileInfo getFile(String groupName, String remoteFileName) {
-        try {
-            StorageClient storageClient = getTrackerClient();
-            return storageClient.get_file_info(groupName, remoteFileName);
-        } catch (IOException e) {
-            logger.error("IO Exception: Get File from Fast DFS failed", e);
-        } catch (Exception e) {
-            logger.error("Non IO Exception: Get File from Fast DFS failed", e);
-        }
-        return null;
-    }
-
-    public static InputStream downFile(String groupName, String remoteFileName) {
-        try {
-            StorageClient storageClient = getTrackerClient();
-            byte[] fileByte = storageClient.download_file(groupName, remoteFileName);
-            InputStream ins = new ByteArrayInputStream(fileByte);
-            return ins;
-        } catch (IOException e) {
-            logger.error("IO Exception: Get File from Fast DFS failed", e);
-        } catch (Exception e) {
-            logger.error("Non IO Exception: Get File from Fast DFS failed", e);
-        }
-        return null;
-    }
-
-    public static void deleteFile(String groupName, String remoteFileName)
-            throws Exception {
-        StorageClient storageClient = getTrackerClient();
-        int i = storageClient.delete_file(groupName, remoteFileName);
-        logger.info("delete file successfully!!!" + i);
-    }
-
-    public static StorageServer[] getStoreStorages(String groupName)
-            throws IOException {
-        TrackerClient trackerClient = new TrackerClient();
-        TrackerServer trackerServer = trackerClient.getConnection();
-        return trackerClient.getStoreStorages(trackerServer, groupName);
-    }
-
-    public static ServerInfo[] getFetchStorages(String groupName,
-                                                String remoteFileName) throws IOException {
-        TrackerClient trackerClient = new TrackerClient();
-        TrackerServer trackerServer = trackerClient.getConnection();
-        return trackerClient.getFetchStorages(trackerServer, groupName, remoteFileName);
-    }
-
-    public static String getTrackerUrl() throws IOException {
-        return "http://" + getTrackerServer().getInetSocketAddress().getHostString() + ":" + ClientGlobal.getG_tracker_http_port() + "/";
-    }
-
-    private static StorageClient getTrackerClient() throws IOException {
-        TrackerClient trackerClient = new TrackerClient();
-        TrackerServer trackerServer = trackerClient.getConnection();
-        StorageServer storageServer = getStorageServer(getStorageServerIp(trackerClient, trackerServer));
-        StorageClient storageClient
-                = new StorageClient(trackerServer, storageServer);
-        return storageClient;
-    }
-
-    private static TrackerServer getTrackerServer() throws IOException {
-        TrackerClient trackerClient = new TrackerClient();
-        TrackerServer trackerServer = trackerClient.getConnection();
-        return trackerServer;
-    }
-
-    /**
-     * 获得可用的storage IP
-     *
-     * @param trackerClient
-     * @param trackerServer
-     * @return 返回storage IP
-     */
-    private static String getStorageServerIp(TrackerClient trackerClient, TrackerServer trackerServer) {
-        String storageIp = null;
-        if (trackerClient != null && trackerServer != null) {
-            try {
-                StorageServer storageServer = trackerClient.getStoreStorage(trackerServer, "group1");
-                storageIp = storageServer.getSocket().getInetAddress().getHostAddress();
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
-        }
-        logger.info("获取组中可用的storage IP:" + storageIp);
-        return storageIp;
-    }
-
-    /**
-     * 得到Storage服务
-     *
-     * @param storageIp
-     * @return 返回Storage服务
-     */
-    private static StorageServer getStorageServer(String storageIp) {
-        StorageServer storageServer = null;
-        if (storageIp != null && !("").equals(storageIp)) {
-            try {
-                storageServer = new StorageServer(storageIp, Integer.parseInt("23000"), Integer.parseInt("1"));
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
-        }
-        logger.info("storage server生成");
-        return storageServer;
-    }
-}

+ 0 - 70
prec-service/src/main/java/com/diagbot/client/fastdfs/FastDFSFile.java

@@ -1,70 +0,0 @@
-package com.diagbot.client.fastdfs;
-
-public class FastDFSFile {
-    private String name;
-
-    private byte[] content;
-
-    private String ext;
-
-    private String md5;
-
-    private String author;
-
-    public FastDFSFile(String name, byte[] content, String ext, String height,
-                       String width, String author) {
-        super();
-        this.name = name;
-        this.content = content;
-        this.ext = ext;
-        this.author = author;
-    }
-
-    public FastDFSFile(String name, byte[] content, String ext) {
-        super();
-        this.name = name;
-        this.content = content;
-        this.ext = ext;
-
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public void setName(String name) {
-        this.name = name;
-    }
-
-    public byte[] getContent() {
-        return content;
-    }
-
-    public void setContent(byte[] content) {
-        this.content = content;
-    }
-
-    public String getExt() {
-        return ext;
-    }
-
-    public void setExt(String ext) {
-        this.ext = ext;
-    }
-
-    public String getMd5() {
-        return md5;
-    }
-
-    public void setMd5(String md5) {
-        this.md5 = md5;
-    }
-
-    public String getAuthor() {
-        return author;
-    }
-
-    public void setAuthor(String author) {
-        this.author = author;
-    }
-}

+ 0 - 49
prec-service/src/main/java/com/diagbot/config/FastDFSConfigurer.java

@@ -1,49 +0,0 @@
-package com.diagbot.config;
-
-import lombok.extern.slf4j.Slf4j;
-import org.csource.fastdfs.ClientGlobal;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import java.util.Properties;
-
-/**
- * @Description:
- * @author: gaodm
- * @time: 2019/3/1 14:15
- */
-@Configuration
-@Slf4j
-public class FastDFSConfigurer {
-    @Value("${fastdfs.connect_timeout_in_seconds}")
-    private String connectTimeout;
-    @Value("${fastdfs.network_timeout_in_seconds}")
-    private String networkTimeout;
-    @Value("${fastdfs.charset}")
-    private String charset;
-    @Value("${fastdfs.http_tracker_http_port}")
-    private String httpTrackerHttpPort;
-    @Value("${fastdfs.http_anti_steal_token}")
-    private String httpAntiStealToken;
-    @Value("${fastdfs.tracker_servers}")
-    private String trackerServers;
-
-    @Bean
-    public Integer fastDFSInit(){
-        try {
-            Properties props = new Properties();
-            props.put(ClientGlobal.PROP_KEY_CONNECT_TIMEOUT_IN_SECONDS, connectTimeout);
-            props.put(ClientGlobal.PROP_KEY_NETWORK_TIMEOUT_IN_SECONDS, networkTimeout);
-            props.put(ClientGlobal.PROP_KEY_CHARSET, charset);
-            props.put(ClientGlobal.PROP_KEY_HTTP_TRACKER_HTTP_PORT, httpTrackerHttpPort);
-            props.put(ClientGlobal.PROP_KEY_HTTP_ANTI_STEAL_TOKEN, httpAntiStealToken);
-            props.put(ClientGlobal.PROP_KEY_TRACKER_SERVERS, trackerServers);
-            ClientGlobal.initByProperties(props);
-
-        } catch (Exception e) {
-            log.error("FastDFS Client Init Fail!", e);
-        }
-        return 1;
-    }
-}

+ 3 - 9
prec-service/src/main/java/com/diagbot/dto/FileDTO.java

@@ -15,21 +15,15 @@ public class FileDTO {
     private String original;
     private String title;
     private String url;
+    private String md5;
     private String info;
     private Integer type;
 
-    public FileDTO(String state, String info, Integer type) {
-        this.state = state;
-        this.info = info;
-        this.type = type;
-    }
-
     public FileDTO(String state, String info) {
         this.state = state;
         this.info = info;
     }
-
-    public FileDTO() {
+    public FileDTO(){
 
     }
-}
+}

+ 17 - 0
prec-service/src/main/java/com/diagbot/dto/FileDeleteDTO.java

@@ -0,0 +1,17 @@
+package com.diagbot.dto;
+
+import lombok.Getter;
+import lombok.Setter;
+
+/**
+ * @Description:
+ * @author: gaodm
+ * @time: 2019/11/4 11:09
+ */
+@Getter
+@Setter
+public class FileDeleteDTO {
+    private Object data;
+    private String message;
+    private String status;
+}

+ 25 - 0
prec-service/src/main/java/com/diagbot/dto/FileUploadDTO.java

@@ -0,0 +1,25 @@
+package com.diagbot.dto;
+
+import lombok.Getter;
+import lombok.Setter;
+
+/**
+ * @Description:
+ * @author: gaodm
+ * @time: 2019/11/4 10:55
+ */
+@Getter
+@Setter
+public class FileUploadDTO {
+    private String url;
+    private String md5;
+    private String path;
+    private String domain;
+    private String scene;
+    private int size;
+    private int mtime;
+    private String scenes;
+    private String retmsg;
+    private int retcode;
+    private String src;
+}
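
For reference, a small example of how the JSON returned by the go-fastdfs /group1/upload endpoint maps onto this DTO through the project's GsonUtil helper, the same call UploadServiceImpl makes later in this diff. The field values in the sample payload are invented purely for illustration.

import com.diagbot.dto.FileUploadDTO;
import com.diagbot.util.GsonUtil;

public class FileUploadDTOSketch {
    public static void main(String[] args) {
        // Invented sample payload; the real one is the go-fastdfs upload response body.
        String json = "{\"url\":\"http://file-server/group1/M01/example.jpg\","
                + "\"md5\":\"d41d8cd98f00b204e9800998ecf8427e\","
                + "\"path\":\"/group1/M01/example.jpg\","
                + "\"scene\":\"M01\",\"size\":1024,\"retcode\":0,\"retmsg\":\"\"}";
        FileUploadDTO dto = GsonUtil.toObject(json, FileUploadDTO.class);
        // Lombok @Getter generates the accessors used here.
        System.out.println(dto.getPath() + " / " + dto.getMd5());
    }
}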

+ 1 - 7
prec-service/src/main/java/com/diagbot/dto/GetInquiryDetailImgDTO.java

@@ -19,10 +19,4 @@ public class GetInquiryDetailImgDTO {
 	@ApiModelProperty(value="原图")
     private String originalImage;
 
-    /**
-     * 缩略图
-     */
-	@ApiModelProperty(value="缩略图")
-    private String narrowImage;
-	
-}
+}

+ 1 - 4
prec-service/src/main/java/com/diagbot/facade/InquiryInfoFacade.java

@@ -107,6 +107,7 @@ public class InquiryInfoFacade extends InquiryInfoServiceImpl {
         List<InquiryReport> inquiryReportList = BeanUtil.listCopyTo(saveInquiryVO.getReportList(), InquiryReport.class);
         inquiryReportList.forEach(i -> {
             i.setInquiryId(inquiryId);
+            i.setNarrowImage("0");
             i.setGmtCreate(now);
             i.setGmtModified(now);
         });
@@ -316,10 +317,6 @@ public class InquiryInfoFacade extends InquiryInfoServiceImpl {
                 if (StringUtil.isNotBlank(inquiryReport.getOriginalImage())) {
                     inquiryReport.setOriginalImage(imageUrlPrefix + inquiryReport.getOriginalImage());
                 }
-
-                if (StringUtil.isNotBlank(inquiryReport.getNarrowImage())) {
-                    inquiryReport.setNarrowImage(imageUrlPrefix + inquiryReport.getNarrowImage());
-                }
             }
             getInquiryDetailDTO.setImageList(BeanUtil.listCopyTo(inquiryReportList, GetInquiryDetailImgDTO.class));
         }

+ 5 - 8
prec-service/src/main/java/com/diagbot/service/UploadService.java

@@ -12,15 +12,12 @@ import java.util.List;
  * @time: 2018/11/13 13:50
  */
 public interface UploadService {
-    //单文件上传
+
     FileDTO singleFileUpload(MultipartFile file);
 
-    //单文件上传同时生成缩略图
-    FileThumDTO singleFileThumUpload(MultipartFile file);
+    //多文件上传(多线程)
+    List<FileDTO> multiFileThumUpload(MultipartFile[] mpfs, Integer[] type);
 
-    //多文件上传
-    List<FileDTO> multiFileUpload(MultipartFile[] mpfs);
+    FileDTO singleFileUpload(MultipartFile file, Integer type);
 
-    //多文件上传同时生成缩略图
-    List<FileThumDTO> multiFileThumUpload(MultipartFile[] mpfs, Integer[] type);
-}
+}

+ 121 - 185
prec-service/src/main/java/com/diagbot/service/impl/UploadServiceImpl.java

@@ -1,23 +1,33 @@
 package com.diagbot.service.impl;
 
-import com.diagbot.client.fastdfs.FastDFSClient;
-import com.diagbot.client.fastdfs.FastDFSFile;
 import com.diagbot.dto.FileDTO;
+import com.diagbot.dto.FileDeleteDTO;
 import com.diagbot.dto.FileThumDTO;
+import com.diagbot.dto.FileUploadDTO;
 import com.diagbot.exception.CommonErrorCode;
 import com.diagbot.exception.CommonException;
 import com.diagbot.service.UploadService;
+import com.diagbot.util.GsonUtil;
+import com.diagbot.util.StringUtil;
+import io.github.lvyahui8.spring.aggregate.facade.DataBeanAggregateQueryFacade;
 import lombok.extern.slf4j.Slf4j;
-import net.coobird.thumbnailator.Thumbnails;
+import okhttp3.FormBody;
+import okhttp3.MediaType;
+import okhttp3.MultipartBody;
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.RequestBody;
+import okhttp3.Response;
+import okhttp3.ResponseBody;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
 import org.springframework.stereotype.Service;
 import org.springframework.web.multipart.MultipartFile;
 
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 /**
  * @Description: 文件上传服务接口实现
@@ -27,215 +37,141 @@ import java.util.List;
 @Slf4j
 @Service
 public class UploadServiceImpl implements UploadService {
+    @Autowired
+    private DataBeanAggregateQueryFacade dataBeanAggregateQueryFacade;
+
+    @Value("${imageUrl.prefix}")
+    private String imagerUrl;
+
     @Override
     public FileDTO singleFileUpload(MultipartFile file) {
         if (file.isEmpty()) {
             return new FileDTO("FAILURE", "文件不能为空");
         }
         //文件大小上限4M
-        if (file.getSize() > 4 * 1024 * 1024) {
+        if (file.getSize() > 1024 * 1024 * 4) {
             return new FileDTO("FAILURE", "文件上传失败,超出大小限制4MB");
         }
+
+        String result = "";
         try {
-            FileDTO fileDTO = saveFile(file, false);
-            return fileDTO;
+            OkHttpClient httpClient = new OkHttpClient();
+            MultipartBody multipartBody = new MultipartBody.Builder().
+                    setType(MultipartBody.FORM)
+                    .addFormDataPart("file", file.getOriginalFilename(),
+                            RequestBody.create(MediaType.parse("multipart/form-data;charset=utf-8"),
+                                    file.getBytes()))
+                    .addFormDataPart("scene", "M01")
+                    .addFormDataPart("output", "json")
+                    .build();
+
+            Request request = new Request.Builder()
+                    .url(imagerUrl + "/group1/upload")
+                    .post(multipartBody)
+                    .build();
+
+            Response response = httpClient.newCall(request).execute();
+            if (response.isSuccessful()) {
+                ResponseBody body = response.body();
+                if (body != null) {
+                    result = body.string();
+                    //System.out.println(result);
+                }
+            }
+
+            if (StringUtil.isBlank(result)) {
+                return new FileDTO("FAILURE", "文件上传失败,请重新上传");
+            }
         } catch (Exception e) {
             log.error("文件上传失败", e);
             return new FileDTO("FAILURE", "文件上传失败,请重新上传");
         }
-    }
 
-    @Override
-    public FileThumDTO singleFileThumUpload(MultipartFile file){
-        FileThumDTO fileThumDTO = new FileThumDTO();
-        if (file.isEmpty()) {
-            fileThumDTO.setSource(new FileDTO("FAILURE", "文件不能为空"));
-            return fileThumDTO;
-        }
-        //文件大小上限4M
-        if (file.getSize() > 4 * 1024 * 1024) {
-            fileThumDTO.setSource(new FileDTO("FAILURE", "文件上传失败,超出大小限制4MB"));
-            return fileThumDTO;
-        }
-        try {
-            fileThumDTO = saveFileWithThum(file);
-            return fileThumDTO;
-        } catch (Exception e) {
-            log.error("文件上传失败", e);
-            fileThumDTO.setSource(new FileDTO("FAILURE", "文件上传失败,请重新上传"));
-            return fileThumDTO;
-        }
+        FileUploadDTO fileUploadDTO = GsonUtil.toObject(result, FileUploadDTO.class);
+        FileDTO fileDTO = new FileDTO("SUCCESS", "文件上传成功");
+        fileDTO.setUrl(fileUploadDTO.getPath());
+        fileDTO.setMd5(fileUploadDTO.getMd5());
+        fileDTO.setOriginal(file.getOriginalFilename());
+        fileDTO.setTitle(file.getOriginalFilename());
+        return fileDTO;
     }
 
-    @Override
-    public List<FileDTO> multiFileUpload(MultipartFile[] mpfs) {
-        // 上传文件返回的路径集合
-        List<FileDTO> fileDTOS = new ArrayList<>();
-        if (null == mpfs) {
-            throw new CommonException(CommonErrorCode.PARAM_IS_NULL, "文件不能为空");
-        }
-
-        for (MultipartFile file : mpfs) {
-            if (file.isEmpty()) {
-                fileDTOS.add(new FileDTO("FAILURE", "文件不能为空"));
-                continue;
-            }
-            //文件大小上限4M
-            if (file.getSize() > 4 * 1024 * 1024) {
-                fileDTOS.add(new FileDTO("FAILURE", "文件上传失败,超出大小限制4MB"));
-                continue;
+    /**
+     * 删除服务端文件
+     *
+     * @param md5
+     * @return
+     */
+    public FileDTO deleteRemoteFile(String md5) {
+        String result = "";
+        try {
+            OkHttpClient httpClient = new OkHttpClient();
+            RequestBody formBody = new FormBody.Builder()
+                    .add("md5", md5)
+                    .build();
+
+            Request request = new Request.Builder()
+                    .url(imagerUrl + "/group1/delete")
+                    .post(formBody)
+                    .build();
+
+            Response response = httpClient.newCall(request).execute();
+            if (response.isSuccessful()) {
+                ResponseBody body = response.body();
+                if (body != null) {
+                    result = body.string();
+                    //System.out.println(result);
+                }
             }
 
-            try {
-                FileDTO fileDTO = saveFile(file, false);
-                fileDTOS.add(fileDTO);
-            } catch (Exception e) {
-                log.error("文件上传失败", e);
-                fileDTOS.add(new FileDTO("FAILURE", "文件上传失败,请重新上传"));
+            if (StringUtil.isBlank(result)) {
+                return new FileDTO("FAILURE", "文件删除失败");
             }
-        }
-        return fileDTOS;
-    }
 
-
-    @Override
-    public List<FileThumDTO> multiFileThumUpload(MultipartFile[] mpfs, Integer[] type) {
-        // 上传文件返回的路径集合
-        List<FileThumDTO> fileDTOS = new ArrayList<>();
-        if (null == mpfs) {
-            throw new CommonException(CommonErrorCode.PARAM_IS_NULL, "文件不能为空");
+        } catch (Exception e) {
+            log.error("", e);
+            return new FileDTO("FAILURE", "文件删除失败");
         }
 
-        FileThumDTO fileThumDTO = null;
-        for (int i = 0; i < mpfs.length; i++) {
-            MultipartFile file = mpfs[i];
-            if (file.isEmpty()) {
-                fileThumDTO = new FileThumDTO();
-                fileThumDTO.setSource(new FileDTO("FAILURE", "文件不能为空", type[i]));
-                fileDTOS.add(fileThumDTO);
-                continue;
-            }
-            //文件大小上限4M
-            if (file.getSize() > 4 * 1024 * 1024) {
-                fileThumDTO = new FileThumDTO();
-                fileThumDTO.setSource(new FileDTO("FAILURE", "文件上传失败,超出大小限制4MB", type[i]));
-                fileDTOS.add(fileThumDTO);
-                continue;
-            }
-
-            try {
-                fileThumDTO = saveFileWithThum(file);
-                fileThumDTO.getSource().setType(type[i]);
-                fileThumDTO.getThumbnail().setType(type[i]);
-                fileDTOS.add(fileThumDTO);
-            } catch (Exception e) {
-                log.error("文件上传失败", e);
-                fileThumDTO = new FileThumDTO();
-                fileThumDTO.setSource(new FileDTO("FAILURE", "文件上传失败,请重新上传", type[i]));
-                fileDTOS.add(fileThumDTO);
-            }
+        FileDeleteDTO fileDeleteDTO = GsonUtil.toObject(result, FileDeleteDTO.class);
+        if (fileDeleteDTO.getStatus().equals("fail")) {
+            return new FileDTO("FAILURE", fileDeleteDTO.getMessage());
         }
-        return fileDTOS;
-    }
-
-    /**
-     * @param multipartFile
-     * @return
-     * @throws IOException
-     */
-    public FileThumDTO saveFileWithThum(MultipartFile multipartFile) throws IOException {
-        FileThumDTO fileThumDTO = new FileThumDTO();
-        //原图上传到服务器
-        FileDTO source = saveFile(multipartFile);
-        fileThumDTO.setSource(source);
-        //压缩图片
-        FileDTO thumbnail = saveFile(multipartFile, true);
-        fileThumDTO.setThumbnail(thumbnail);
-        return fileThumDTO;
-    }
-
-    /**
-     * @param multipartFile
-     * @return
-     * @throws IOException
-     */
-    public FileDTO saveFile(MultipartFile multipartFile) throws IOException {
-        return saveFile(multipartFile, false);
+        return new FileDTO("SUCCESS", "文件删除成功");
     }
 
-    /**
-     * @param multipartFile
-     * @return
-     * @throws IOException
-     */
-    public FileDTO saveFile(MultipartFile multipartFile, Boolean isThum) throws IOException {
-        String[] fileAbsolutePath = {};
-        String fileName = multipartFile.getOriginalFilename();
-        String ext = fileName.substring(fileName.lastIndexOf(".") + 1);
-        byte[] file_buff = null;
-        InputStream inputStream = multipartFile.getInputStream();
-        //压缩图片
-        if (isThum) {
-            Integer width = 280;
-            Integer hight = 430;
-            //文件名重新命名
-            fileName = width + "_" + hight + "_" + fileName;
-            inputStream = compress(inputStream, width, hight);
-        }
 
-        if (inputStream != null) {
-            int len1 = inputStream.available();
-            file_buff = new byte[len1];
-            inputStream.read(file_buff);
-        }
-        inputStream.close();
-        FastDFSFile file = new FastDFSFile(fileName, file_buff, ext);
+    @Override
+    public List<FileDTO> multiFileThumUpload(MultipartFile[] mpfs, Integer[] type) {
+        // 上传文件返回的路径集合
+        List<FileDTO> fileDTOS = new ArrayList<>();
         try {
-            fileAbsolutePath = FastDFSClient.upload(file);  //upload to fastdfs
+            Map<String, Object> invokeParams = new HashMap<>();
+            for (int i = 0; i < 6; i++) {
+                int j = i + 1;
+                String fileName = "file" + j;
+                String typeName = "type" + j;
+                if (null != mpfs && null != type) {
+                    if (i < mpfs.length && i < type.length
+                            && null != mpfs[i] && null != type[i]) {
+                        invokeParams.put(fileName, mpfs[i]);
+                        invokeParams.put(typeName, type[i]);
+                    }
+                }
+            }
+            fileDTOS
+                    = dataBeanAggregateQueryFacade.get("thumUploads", invokeParams, List.class);
         } catch (Exception e) {
-            log.error("文件上传异常", e);
-            return new FileDTO("FAILURE", "文件上传异常");
+            throw new CommonException(CommonErrorCode.SERVER_IS_ERROR);
         }
-        if (fileAbsolutePath == null) {
-            log.error("文件上传失败,请重新上传");
-            return new FileDTO("FAILURE", "文件上传失败,请重新上传");
-        }
-        String path = "/" + fileAbsolutePath[0] + "/" + fileAbsolutePath[1];
-        FileDTO fileDTO = new FileDTO("SUCCESS", "文件上传成功");
-        fileDTO.setUrl(path);
-        fileDTO.setOriginal(fileName);
-        fileDTO.setTitle(fileName);
-        return fileDTO;
-
+        return fileDTOS;
     }
 
-    public ByteArrayInputStream compress(InputStream inputStream, Integer width, Integer hight) throws IOException {
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        Thumbnails.of(inputStream)
-                .size(width, hight)
-                .toOutputStream(baos);
-        final ByteArrayInputStream swapStream = new ByteArrayInputStream(baos.toByteArray());
-        return swapStream;
-    }
 
-    /**
-     * 删除服务端文件
-     *
-     * @param path
-     * @return
-     */
-    public FileDTO deleteRemoteFile(String path) {
-        if (path.startsWith("/")) {
-            path = path.substring(1);
-        }
-        String fileName = path.substring(path.indexOf("/") + 1);
-        String groupName = path.substring(0, path.indexOf("/"));
-        try {
-            FastDFSClient.deleteFile(groupName, fileName);
-        } catch (Exception e) {
-            log.error("", e);
-            return new FileDTO("FAILURE", "文件删除失败");
-        }
-        return new FileDTO("SUCCESS", "文件删除成功");
+    @Override
+    public FileDTO singleFileUpload(MultipartFile file, Integer type) {
+        FileDTO fileDTO = singleFileUpload(file);
+        fileDTO.setType(type);
+        return fileDTO;
     }
 }
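
A side note on the OkHttp usage above: each upload or delete call builds a fresh OkHttpClient with default timeouts. If connection reuse or explicit timeouts are wanted, one shared client could be kept instead, roughly as sketched below; this is not part of this commit and the timeout values are arbitrary placeholders.

import okhttp3.OkHttpClient;

import java.util.concurrent.TimeUnit;

final class SharedHttpClientSketch {
    // One OkHttpClient instance can serve every upload/delete request;
    // OkHttp maintains its own connection pool and dispatcher internally.
    static final OkHttpClient CLIENT = new OkHttpClient.Builder()
            .connectTimeout(5, TimeUnit.SECONDS)    // placeholder values
            .readTimeout(30, TimeUnit.SECONDS)
            .writeTimeout(30, TimeUnit.SECONDS)
            .build();

    private SharedHttpClientSketch() {
    }
}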

+ 0 - 7
prec-service/src/main/java/com/diagbot/vo/SaveInquiryReportVO.java

@@ -23,13 +23,6 @@ public class SaveInquiryReportVO {
     @NotBlank(message="原图不能为空")
     private String originalImage;
 
-    /**
-     * 缩略图
-     */
-    @ApiModelProperty(value="缩略图",required=true)
-    @NotBlank(message="缩略图不能为空")
-    private String narrowImage;
-
     /**
      * 图片排序号
      */

+ 12 - 42
prec-service/src/main/java/com/diagbot/web/UploadController.java

@@ -43,52 +43,29 @@ public class UploadController {
         }
     }
 
-    @ApiOperation(value = "智能预问诊-单个文件上传同时生成缩略图")
-    @CrossOrigin(allowCredentials = "true", allowedHeaders = "*", methods = { RequestMethod.POST }, origins = "*")
-    @PostMapping(value = "/uploadImageThum", produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
-    public RespDTO<FileThumDTO> singleFileThumUpload(@RequestParam("upfile") MultipartFile file) {
-        FileThumDTO data = uploadFacade.singleFileThumUpload(file);
-        if (null != data.getSource() &&
-                data.getSource().getState().equals("FAILURE")) {
-            return RespDTO.onError(data.getSource().getInfo());
-        } else {
-            return RespDTO.onSuc(data);
-        }
-    }
 
-    @ApiOperation(value = "智能预问诊-多个文件上传")
-    @CrossOrigin(allowCredentials = "true", allowedHeaders = "*", methods = { RequestMethod.POST }, origins = "*")
-    @PostMapping(value = "/uploadImages", produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
-    public RespDTO<List<FileDTO>> multiFileUpload(@RequestParam("upfiles") MultipartFile[] file) {
-        List<FileDTO> data = uploadFacade.multiFileUpload(file);
-        String msg = "";
-        if (ListUtil.isNotEmpty(data)) {
-            for (int i = 0; i < data.size(); i++) {
-                if (data.get(i).getState().equals("FAILURE")) {
-                    msg += "第【" + (i + 1) + "】张图片上传失败," + data.get(i).getInfo() + ";";
-                }
-            }
-        }
-        if (StringUtil.isNotBlank(msg)) {
-            return RespDTO.onError(msg);
+    @PostMapping("/deleteRemoteFile")
+    @ApiOperation(value = "智能预问诊-文件删除")
+    public RespDTO<FileDTO> deleteRemoteFile(@RequestParam("md5") String md5) {
+        FileDTO data = uploadFacade.deleteRemoteFile(md5);
+        if (data.getState().equals("FAILURE")) {
+            return RespDTO.onError(data.getInfo());
         } else {
             return RespDTO.onSuc(data);
         }
     }
 
-    @ApiOperation(value = "智能预问诊-多个文件上传同时生成缩略图")
+
+    @ApiOperation(value = "智能预问诊-多个文件上传")
     @CrossOrigin(allowCredentials = "true", allowedHeaders = "*", methods = { RequestMethod.POST }, origins = "*")
     @PostMapping(value = "/uploadImageThums", produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
-    public RespDTO<List<FileThumDTO>> multiFileThumUpload(@RequestParam("upfiles") MultipartFile[] file, @RequestParam("type") Integer[] type) {
-        List<FileThumDTO> data = uploadFacade.multiFileThumUpload(file, type);
+    public RespDTO<List<FileDTO>> multiFileThumUpload(@RequestParam("upfiles") MultipartFile[] file, @RequestParam("type") Integer[] type) {
+        List<FileDTO> data = uploadFacade.multiFileThumUpload(file, type);
         String msg = "";
         if (ListUtil.isNotEmpty(data)) {
             for (int i = 0; i < data.size(); i++) {
-                if (data.get(i).getSource() != null && data.get(i).getSource().getState().equals("FAILURE")) {
-                    msg += "第【" + (i + 1) + "】张图片上传失败," + data.get(i).getSource().getInfo() + ";";
-                }
-                if (data.get(i).getThumbnail() != null && data.get(i).getThumbnail().getState().equals("FAILURE")) {
-                    msg += "第【" + (i + 1) + "】张图片缩略图上传失败," + data.get(i).getThumbnail().getInfo() + ";";
+                if ("FAILURE".equals(data.get(i).getState())) {
+                    msg += "第【" + (i + 1) + "】张图片上传失败," + data.get(i).getInfo() + ";";
                 }
             }
         }
@@ -98,11 +75,4 @@ public class UploadController {
             return RespDTO.onSuc(data);
         }
     }
-
-    @PostMapping("/deleteRemoteFile")
-    @ApiOperation(value = "智能预问诊-文件删除")
-    public String deleteRemoteFile(@RequestParam("path") String path) {
-        String data = FastJsonUtils.getBeanToJson(uploadFacade.deleteRemoteFile(path));
-        return data;
-    }
 }

+ 31 - 5
prec-service/src/main/resources/logback-spring.xml

@@ -15,7 +15,7 @@
                     converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
     <!-- 彩色日志格式 -->
     <!--<property name="CONSOLE_LOG_PATTERN"-->
-              <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
+    <!--value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(-&#45;&#45;){faint} %clr([%15.15t]){faint} %clr(%logger){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>-->
     <!--包名输出缩进对齐-->
     <property name="CONSOLE_LOG_PATTERN"
               value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
@@ -229,6 +229,22 @@
         </encoder>
     </appender>
 
+    <appender name="LOGSTASHPRE" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.121:5044</destination>
+        <!-- encoder必须配置,有多种可选 -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"prec-service"}</customFields>
+        </encoder>
+    </appender>
+
+    <appender name="LOGSTASHPRO" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>192.168.2.122:5044</destination>
+        <!-- encoder必须配置,有多种可选 -->
+        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
+            <customFields>{"appname":"prec-service"}</customFields>
+        </encoder>
+    </appender>
+
     <!-- 本地环境下的日志配置 -->
     <springProfile name="local">
         <root level="INFO">
@@ -236,7 +252,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
         </root>
     </springProfile>
@@ -248,7 +263,6 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHDEV"/>
         </root>
@@ -261,12 +275,23 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <!-- 生产环境将请stdout去掉 -->
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="LOGSTASHTEST"/>
         </root>
     </springProfile>
 
+    <!-- 预发布环境下的日志配置 -->
+    <springProfile name="pre">
+        <root level="INFO">
+            <appender-ref ref="ERROR"/>
+            <appender-ref ref="WARN"/>
+            <appender-ref ref="INFO"/>
+            <appender-ref ref="DEBUG"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRE"/>
+        </root>
+    </springProfile>
+
     <!-- 生产环境下的日志配置 -->
     <springProfile name="pro">
         <root level="INFO">
@@ -274,7 +299,8 @@
             <appender-ref ref="WARN"/>
             <appender-ref ref="INFO"/>
             <appender-ref ref="DEBUG"/>
-            <appender-ref ref="LOGSTASH"/>
+            <appender-ref ref="STDOUT"/>
+            <appender-ref ref="LOGSTASHPRO"/>
         </root>
     </springProfile>
 </configuration>

+ 5 - 5
precman-service/pom.xml

@@ -154,11 +154,6 @@
             <artifactId>spring-cloud-starter-stream-rabbit</artifactId>
         </dependency>
 
-        <dependency>
-            <groupId>net.oschina.zcx7878</groupId>
-            <artifactId>fastdfs-client-java</artifactId>
-            <version>1.27.0.0</version>
-        </dependency>
         <!-- 文件上传相关架包 -->
         <dependency>
             <groupId>commons-fileupload</groupId>
@@ -171,6 +166,11 @@
             <version>2.4</version>
         </dependency>
 
+        <dependency>
+            <groupId>com.squareup.okhttp3</groupId>
+            <artifactId>okhttp</artifactId>
+        </dependency>
+
     </dependencies>
 
     <build>

+ 0 - 161
precman-service/src/main/java/com/diagbot/client/fastdfs/FastDFSClient.java

@@ -1,161 +0,0 @@
-package com.diagbot.client.fastdfs;
-
-import org.csource.common.NameValuePair;
-import org.csource.fastdfs.ClientGlobal;
-import org.csource.fastdfs.FileInfo;
-import org.csource.fastdfs.ServerInfo;
-import org.csource.fastdfs.StorageClient;
-import org.csource.fastdfs.StorageServer;
-import org.csource.fastdfs.TrackerClient;
-import org.csource.fastdfs.TrackerServer;
-import org.slf4j.LoggerFactory;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-
-/**
- * @Description: FastDFS 客户端
- * @author: gaodm
- * @time: 2018/11/13 13:55
- */
-public class FastDFSClient {
-    private static org.slf4j.Logger logger = LoggerFactory.getLogger(FastDFSClient.class);
-
-    public static String[] upload(FastDFSFile file) {
-        logger.info("File Name: " + file.getName() + "File Length:" + file.getContent().length);
-
-        // 设置元信息
-        NameValuePair[] meta_list = new NameValuePair[2];
-        meta_list[0] = new NameValuePair("name", file.getName());
-        meta_list[1] = new NameValuePair("ext", file.getExt());
-
-        long startTime = System.currentTimeMillis();
-        String[] uploadResults = null;
-        StorageClient storageClient = null;
-        try {
-            storageClient = getTrackerClient();
-            uploadResults = storageClient.upload_file(file.getContent(), file.getExt(), meta_list);
-        } catch (IOException e) {
-            logger.error("IO Exception when uploadind the file:" + file.getName(), e);
-        } catch (Exception e) {
-            logger.error("Non IO Exception when uploadind the file:" + file.getName(), e);
-        }
-        logger.info("upload_file time used:" + (System.currentTimeMillis() - startTime) + " ms");
-
-        if (uploadResults == null && storageClient != null) {
-            logger.error("upload file fail, error code:" + storageClient.getErrorCode());
-        }
-        String groupName = uploadResults[0];
-        String remoteFileName = uploadResults[1];
-
-        logger.info("upload file successfully!!!" + "group_name:" + groupName + ", remoteFileName:" + " " + remoteFileName);
-        return uploadResults;
-    }
-
-    public static FileInfo getFile(String groupName, String remoteFileName) {
-        try {
-            StorageClient storageClient = getTrackerClient();
-            return storageClient.get_file_info(groupName, remoteFileName);
-        } catch (IOException e) {
-            logger.error("IO Exception: Get File from Fast DFS failed", e);
-        } catch (Exception e) {
-            logger.error("Non IO Exception: Get File from Fast DFS failed", e);
-        }
-        return null;
-    }
-
-    public static InputStream downFile(String groupName, String remoteFileName) {
-        try {
-            StorageClient storageClient = getTrackerClient();
-            byte[] fileByte = storageClient.download_file(groupName, remoteFileName);
-            InputStream ins = new ByteArrayInputStream(fileByte);
-            return ins;
-        } catch (IOException e) {
-            logger.error("IO Exception: Get File from Fast DFS failed", e);
-        } catch (Exception e) {
-            logger.error("Non IO Exception: Get File from Fast DFS failed", e);
-        }
-        return null;
-    }
-
-    public static void deleteFile(String groupName, String remoteFileName)
-            throws Exception {
-        StorageClient storageClient = getTrackerClient();
-        int i = storageClient.delete_file(groupName, remoteFileName);
-        logger.info("delete file successfully!!!" + i);
-    }
-
-    public static StorageServer[] getStoreStorages(String groupName)
-            throws IOException {
-        TrackerClient trackerClient = new TrackerClient();
-        TrackerServer trackerServer = trackerClient.getConnection();
-        return trackerClient.getStoreStorages(trackerServer, groupName);
-    }
-
-    public static ServerInfo[] getFetchStorages(String groupName,
-                                                String remoteFileName) throws IOException {
-        TrackerClient trackerClient = new TrackerClient();
-        TrackerServer trackerServer = trackerClient.getConnection();
-        return trackerClient.getFetchStorages(trackerServer, groupName, remoteFileName);
-    }
-
-    public static String getTrackerUrl() throws IOException {
-        return "http://" + getTrackerServer().getInetSocketAddress().getHostString() + ":" + ClientGlobal.getG_tracker_http_port() + "/";
-    }
-
-    private static StorageClient getTrackerClient() throws IOException {
-        TrackerClient trackerClient = new TrackerClient();
-        TrackerServer trackerServer = trackerClient.getConnection();
-        StorageServer storageServer = getStorageServer(getStorageServerIp(trackerClient, trackerServer));
-        StorageClient storageClient
-                = new StorageClient(trackerServer, storageServer);
-        return storageClient;
-    }
-
-    private static TrackerServer getTrackerServer() throws IOException {
-        TrackerClient trackerClient = new TrackerClient();
-        TrackerServer trackerServer = trackerClient.getConnection();
-        return trackerServer;
-    }
-
-    /**
-     * 获得可用的storage IP
-     *
-     * @param trackerClient
-     * @param trackerServer
-     * @return 返回storage IP
-     */
-    private static String getStorageServerIp(TrackerClient trackerClient, TrackerServer trackerServer) {
-        String storageIp = null;
-        if (trackerClient != null && trackerServer != null) {
-            try {
-                StorageServer storageServer = trackerClient.getStoreStorage(trackerServer, "group1");
-                storageIp = storageServer.getSocket().getInetAddress().getHostAddress();
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
-        }
-        logger.info("获取组中可用的storage IP:" + storageIp);
-        return storageIp;
-    }
-
-    /**
-     * 得到Storage服务
-     *
-     * @param storageIp
-     * @return 返回Storage服务
-     */
-    private static StorageServer getStorageServer(String storageIp) {
-        StorageServer storageServer = null;
-        if (storageIp != null && !("").equals(storageIp)) {
-            try {
-                storageServer = new StorageServer(storageIp, Integer.parseInt("23000"), Integer.parseInt("2"));
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
-        }
-        logger.info("storage server生成");
-        return storageServer;
-    }
-}

+ 0 - 70
precman-service/src/main/java/com/diagbot/client/fastdfs/FastDFSFile.java

@@ -1,70 +0,0 @@
-package com.diagbot.client.fastdfs;
-
-public class FastDFSFile {
-    private String name;
-
-    private byte[] content;
-
-    private String ext;
-
-    private String md5;
-
-    private String author;
-
-    public FastDFSFile(String name, byte[] content, String ext, String height,
-                       String width, String author) {
-        super();
-        this.name = name;
-        this.content = content;
-        this.ext = ext;
-        this.author = author;
-    }
-
-    public FastDFSFile(String name, byte[] content, String ext) {
-        super();
-        this.name = name;
-        this.content = content;
-        this.ext = ext;
-
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public void setName(String name) {
-        this.name = name;
-    }
-
-    public byte[] getContent() {
-        return content;
-    }
-
-    public void setContent(byte[] content) {
-        this.content = content;
-    }
-
-    public String getExt() {
-        return ext;
-    }
-
-    public void setExt(String ext) {
-        this.ext = ext;
-    }
-
-    public String getMd5() {
-        return md5;
-    }
-
-    public void setMd5(String md5) {
-        this.md5 = md5;
-    }
-
-    public String getAuthor() {
-        return author;
-    }
-
-    public void setAuthor(String author) {
-        this.author = author;
-    }
-}

+ 0 - 49
precman-service/src/main/java/com/diagbot/config/FastDFSConfigurer.java

@@ -1,49 +0,0 @@
-package com.diagbot.config;
-
-import lombok.extern.slf4j.Slf4j;
-import org.csource.fastdfs.ClientGlobal;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import java.util.Properties;
-
-/**
- * @Description:
- * @author: gaodm
- * @time: 2019/3/1 14:15
- */
-@Configuration
-@Slf4j
-public class FastDFSConfigurer {
-    @Value("${fastdfs.connect_timeout_in_seconds}")
-    private String connectTimeout;
-    @Value("${fastdfs.network_timeout_in_seconds}")
-    private String networkTimeout;
-    @Value("${fastdfs.charset}")
-    private String charset;
-    @Value("${fastdfs.http_tracker_http_port}")
-    private String httpTrackerHttpPort;
-    @Value("${fastdfs.http_anti_steal_token}")
-    private String httpAntiStealToken;
-    @Value("${fastdfs.tracker_servers}")
-    private String trackerServers;
-
-    @Bean
-    public Integer fastDFSInit(){
-        try {
-            Properties props = new Properties();
-            props.put(ClientGlobal.PROP_KEY_CONNECT_TIMEOUT_IN_SECONDS, connectTimeout);
-            props.put(ClientGlobal.PROP_KEY_NETWORK_TIMEOUT_IN_SECONDS, networkTimeout);
-            props.put(ClientGlobal.PROP_KEY_CHARSET, charset);
-            props.put(ClientGlobal.PROP_KEY_HTTP_TRACKER_HTTP_PORT, httpTrackerHttpPort);
-            props.put(ClientGlobal.PROP_KEY_HTTP_ANTI_STEAL_TOKEN, httpAntiStealToken);
-            props.put(ClientGlobal.PROP_KEY_TRACKER_SERVERS, trackerServers);
-            ClientGlobal.initByProperties(props);
-
-        } catch (Exception e) {
-            log.error("FastDFS Client Init Fail!", e);
-        }
-        return 1;
-    }
-}

+ 1 - 0
precman-service/src/main/java/com/diagbot/dto/FileDTO.java

@@ -15,6 +15,7 @@ public class FileDTO {
     private String original;
     private String title;
     private String url;
+    private String md5;
     private String info;
 
     public FileDTO(String state, String info) {

+ 17 - 0
precman-service/src/main/java/com/diagbot/dto/FileDeleteDTO.java

@@ -0,0 +1,17 @@
+package com.diagbot.dto;
+
+import lombok.Getter;
+import lombok.Setter;
+
+/**
+ * @Description:
+ * @author: gaodm
+ * @time: 2019/11/4 11:09
+ */
+@Getter
+@Setter
+public class FileDeleteDTO {
+    private Object data;
+    private String message;
+    private String status;
+}

+ 0 - 0
precman-service/src/main/java/com/diagbot/dto/FileUploadDTO.java


Some files were not shown because too many files changed in this diff