Browse Source

【强化】AI任务(ollamaJobHandler)优化:针对 “model” 模型配置信息,从执行器侧文件类配置调整至调度中心“任务参数”动态配置,支持集成多模型、并结合任务动态配置切换。

xuxueli 4 months ago
parent
commit
05c0f83fc0

+ 5 - 3
doc/XXL-JOB官方文档.md

@@ -1206,7 +1206,8 @@ public void demoJobHandler() throws Exception {
 ```
 {
     "input": "{输入信息,必填信息}",
-    "prompt": "{模型prompt,可选信息}"
+    "prompt": "{模型prompt,可选信息}",
+    "model": "{模型实现,如qwen3:0.6b,可选信息}"
 }
 ```
 - b、difyWorkflowJobHandler:DifyWorkflow 任务,支持自定义inputs、user、baseUrl、apiKey 等输入信息,示例参数如下;
@@ -2543,7 +2544,9 @@ public void execute() {
 
 
 ### 7.40 版本 v3.2.0 Release Notes[规划中]
-- 1、【规划中】登录安全升级,密码加密处理算法从Md5改为Sha256;
+- 1、【强化】AI任务(ollamaJobHandler)优化:针对 “model” 模型配置信息,从执行器侧文件类配置调整至调度中心“任务参数”动态配置,支持集成多模型、并结合任务动态配置切换。
+- 2、【升级】升级多项maven依赖至较新版本,如 spring-ai、dify 等;
+- 3、【规划中】登录安全升级,密码加密处理算法从Md5改为Sha256;
 ```
 // 1、用户表password字段需要调整长度,执行如下命令
 ALTER TABLE xxl_job_user
@@ -2555,7 +2558,6 @@ UPDATE xxl_job_user t SET t.password = '8d969eef6ecad3c29a3a629280e686cf0c3f5d5
 - 2、【规划中】登录态持久化逻辑调整,简化代码逻辑;
 - 3、【规划中】异常页面处理逻辑优化,新增兜底落地页配置;
 - 4、【规划中】登录信息页面空值处理优化,避免空值影响ftl渲染;
-- 5、【规划中】升级多项maven依赖至较新版本,如 xxl-tool、gson、junit 等;
 - 1、【规划中】登录态Token生成逻辑优化,混淆登录时间属性,降低token泄漏风险;
 - 2、【规划中】组件扫描改为BeanPostProcessor方式,避免小概率情况下提前初始化;底层组件移除单例写法,汇总factory统一管理;
 

+ 3 - 6
xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/pom.xml

@@ -16,8 +16,8 @@
     <url>https://www.xuxueli.com/</url>
 
     <properties>
-        <spring-ai.version>1.0.0-M6</spring-ai.version>
-        <dify-java-client.version>1.0.7</dify-java-client.version>
+        <spring-ai.version>1.0.1</spring-ai.version>
+        <dify-java-client.version>1.1.7</dify-java-client.version>
     </properties>
 
     <dependencyManagement>
@@ -54,7 +54,7 @@
         <!-- spring-ai -->
         <dependency>
             <groupId>org.springframework.ai</groupId>
-            <artifactId>spring-ai-ollama-spring-boot-starter</artifactId>
+            <artifactId>spring-ai-starter-model-ollama</artifactId>
             <version>${spring-ai.version}</version>
         </dependency>
 
@@ -81,9 +81,6 @@
                         </goals>
                     </execution>
                 </executions>
-                <configuration>
-                    <fork>true</fork>
-                </configuration>
             </plugin>
         </plugins>
     </build>

+ 0 - 27
xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/java/com/xxl/job/executor/config/XxlJobConfig.java

@@ -3,12 +3,6 @@ package com.xxl.job.executor.config;
 import com.xxl.job.core.executor.impl.XxlJobSpringExecutor;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.springframework.ai.chat.client.ChatClient;
-import org.springframework.ai.chat.client.advisor.MessageChatMemoryAdvisor;
-import org.springframework.ai.chat.client.advisor.SimpleLoggerAdvisor;
-import org.springframework.ai.chat.memory.InMemoryChatMemory;
-import org.springframework.ai.ollama.OllamaChatModel;
-import org.springframework.ai.ollama.api.OllamaOptions;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
@@ -67,25 +61,4 @@ public class XxlJobConfig {
         return xxlJobSpringExecutor;
     }
 
-    /**
-     * ChatClient
-     *
-     * @param ollamaChatModel
-     * @return
-     * @throws Exception
-     */
-    @Bean
-    public ChatClient chatClient(OllamaChatModel ollamaChatModel) throws Exception {
-        // init ollamaiChatClient
-        ChatClient ollamaiChatClient  = ChatClient
-                .builder(ollamaChatModel)
-                .defaultAdvisors(new MessageChatMemoryAdvisor(new InMemoryChatMemory()))    // 管理对话上下文记忆
-                .defaultAdvisors(new SimpleLoggerAdvisor())                                 // 记录日志的Advisor,
-                .defaultOptions(OllamaOptions.builder().topP(0.7).build())                  // 设置ChatModel参数
-                .build();
-
-        return ollamaiChatClient;
-    }
-
-
 }

+ 35 - 10
xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/java/com/xxl/job/executor/controller/IndexController.java

@@ -14,7 +14,11 @@ import jakarta.servlet.http.HttpServletResponse;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.ai.chat.client.ChatClient;
-import org.springframework.beans.factory.annotation.Value;
+import org.springframework.ai.chat.client.advisor.MessageChatMemoryAdvisor;
+import org.springframework.ai.chat.client.advisor.SimpleLoggerAdvisor;
+import org.springframework.ai.chat.memory.MessageWindowChatMemory;
+import org.springframework.ai.ollama.OllamaChatModel;
+import org.springframework.ai.ollama.api.OllamaOptions;
 import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.GetMapping;
@@ -24,7 +28,6 @@ import org.springframework.web.bind.annotation.ResponseBody;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.FluxSink;
 
-import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.function.Consumer;
@@ -45,8 +48,9 @@ public class IndexController {
     // --------------------------------- ollama chat ---------------------------------
 
     @Resource
-    private ChatClient chatClient;
-    private static String prompt = "你好,你是一个研发工程师,擅长解决技术类问题。";
+    private OllamaChatModel ollamaChatModel;
+    private String prompt = "你好,你是一个研发工程师,擅长解决技术类问题。";
+    private String model = "qwen3:0.6b";

     /**
      * ChatClient 简单调用
@@ -54,13 +58,24 @@
     @GetMapping("/chat/simple")
     @ResponseBody
     public String simpleChat(@RequestParam(value = "input") String input) {
-        String result = chatClient
+
+        // build chat-client
+        ChatClient ollamaChatClient = ChatClient
+                .builder(ollamaChatModel)
+                .defaultAdvisors(MessageChatMemoryAdvisor.builder(MessageWindowChatMemory.builder().build()).build())
+                .defaultAdvisors(SimpleLoggerAdvisor.builder().build())
+                .build();
+
+        // call ollama
+        String response = ollamaChatClient
                 .prompt(prompt)
                 .user(input)
+                .options(OllamaOptions.builder().model(model).build())
                 .call()
                 .content();
-        System.out.println("result: " + result);
-        return result;
+
+        logger.info("result: {}", response);
+        return response;
     }
 
     /**
@@ -69,9 +84,19 @@
     @GetMapping("/chat/stream")
     public Flux<String> streamChat(HttpServletResponse response, @RequestParam(value = "input") String input) {
         response.setCharacterEncoding("UTF-8");
-        return chatClient
+
+        // build chat-client
+        ChatClient ollamaChatClient = ChatClient
+                .builder(ollamaChatModel)
+                .defaultAdvisors(MessageChatMemoryAdvisor.builder(MessageWindowChatMemory.builder().build()).build())
+                .defaultAdvisors(SimpleLoggerAdvisor.builder().build())
+                .build();
+
+        // call ollama
+        return ollamaChatClient
                 .prompt(prompt)
                 .user(input)
+                .options(OllamaOptions.builder().model(model).build())
                 .stream()
                 .content();
     }
@@ -85,7 +110,7 @@ public class IndexController {
 
     @GetMapping("/dify/simple")
     @ResponseBody
-    public String difySimple(@RequestParam(required = false, value = "input") String input) throws IOException {
+    public String difySimple(@RequestParam(required = false, value = "input") String input) throws Exception {
 
         Map<String, Object> inputs = new HashMap<>();
         inputs.put("input", input);
@@ -167,7 +192,7 @@ public class IndexController {
                             sink.error(throwable);
                         }
                     });
-                } catch (IOException e) {
+                } catch (Exception e) {
                     throw new RuntimeException(e);
                 }
             }

+ 37 - 7
xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/java/com/xxl/job/executor/jobhandler/AIXxlJob.java

@@ -10,6 +10,11 @@ import io.github.imfangs.dify.client.model.workflow.WorkflowRunRequest;
 import io.github.imfangs.dify.client.model.workflow.WorkflowRunResponse;
 import jakarta.annotation.Resource;
 import org.springframework.ai.chat.client.ChatClient;
+import org.springframework.ai.chat.client.advisor.MessageChatMemoryAdvisor;
+import org.springframework.ai.chat.client.advisor.SimpleLoggerAdvisor;
+import org.springframework.ai.chat.memory.MessageWindowChatMemory;
+import org.springframework.ai.ollama.OllamaChatModel;
+import org.springframework.ai.ollama.api.OllamaOptions;
 import org.springframework.stereotype.Component;
 
 import java.util.HashMap;
@@ -23,8 +28,10 @@ import java.util.Map;
 @Component
 public class AIXxlJob {
 
+    // --------------------------------- ollama chat ---------------------------------
+
     @Resource
-    private ChatClient chatClient;
+    private OllamaChatModel ollamaChatModel;
 
     /**
      * 1、ollama Chat任务
@@ -38,7 +45,7 @@ public class AIXxlJob {
      *  </pre>
      */
     @XxlJob("ollamaJobHandler")
-    public void ollamaJobHandler() throws Exception {
+    public void ollamaJobHandler() {
 
         // param
         String param = XxlJobHelper.getJobParam();
@@ -53,15 +60,18 @@ public class AIXxlJob {
         OllamaParam ollamaParam = null;
         try {
             ollamaParam = GsonTool.fromJson(param, OllamaParam.class);
-            if (ollamaParam.getPrompt() == null) {
+            if (ollamaParam.getPrompt()==null || ollamaParam.getPrompt().isBlank()) {
                 ollamaParam.setPrompt("你是一个研发工程师,擅长解决技术类问题。");
             }
-            if (ollamaParam.getInput() == null || ollamaParam.getInput().trim().isEmpty()) {
+            if (ollamaParam.getInput() == null || ollamaParam.getInput().isBlank()) {
                 XxlJobHelper.log("input is empty.");
 
                 XxlJobHelper.handleFail();
                 return;
             }
+            if (ollamaParam.getModel()==null || ollamaParam.getModel().isBlank()) {
+                ollamaParam.setModel("qwen3:0.6b");
+            }
         } catch (Exception e) {
             XxlJobHelper.log(new RuntimeException("OllamaParam parse error", e));
             XxlJobHelper.handleFail();
@@ -71,18 +81,28 @@ public class AIXxlJob {
         // input
         XxlJobHelper.log("<br><br><b>【Input】: " + ollamaParam.getInput()+ "</b><br><br>");
 
-        // invoke
-        String result = chatClient
+        // build chat-client
+        ChatClient ollamaChatClient = ChatClient
+                .builder(ollamaChatModel)
+                .defaultAdvisors(MessageChatMemoryAdvisor.builder(MessageWindowChatMemory.builder().build()).build())
+                .defaultAdvisors(SimpleLoggerAdvisor.builder().build())
+                .build();
+
+        // call ollama
+        String response = ollamaChatClient
                 .prompt(ollamaParam.getPrompt())
                 .user(ollamaParam.getInput())
+                .options(OllamaOptions.builder().model(ollamaParam.getModel()).build())
                 .call()
                 .content();
-        XxlJobHelper.log("<br><br><b>【Output】: " + result+ "</b><br><br>");
+
+        XxlJobHelper.log("<br><br><b>【Output】: " + response + "</b><br><br>");
     }
 
     private static class OllamaParam{
         private String input;
         private String prompt;
+        private String model;
 
         public String getInput() {
             return input;
@@ -99,9 +119,19 @@ public class AIXxlJob {
         public void setPrompt(String prompt) {
             this.prompt = prompt;
         }
+
+        public String getModel() {
+            return model;
+        }
+
+        public void setModel(String model) {
+            this.model = model;
+        }
     }
 
 
+    // --------------------------------- dify workflow ---------------------------------
+
     /**
      * 2、dify Workflow任务
      *

+ 2 - 5
xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/resources/application.properties

@@ -31,9 +31,6 @@ xxl.job.executor.logretentiondays=30
 
 
 ### ollama
+spring.ai.model.chat=ollama
+### ollama url
 spring.ai.ollama.base-url=http://localhost:11434
-### chat enabled
-spring.ai.ollama.chat.enabled=true
-### chat model,must install it locally through ollama
-spring.ai.ollama.chat.options.model=qwen2.5:1.5b
-spring.ai.ollama.chat.options.temperature=0.8