From 7764448b5d02087a1bfb89c5680146dd74200aa3 Mon Sep 17 00:00:00 2001
From: TangCheng <tangchengmail@163.com>
Date: Fri, 11 Apr 2025 11:12:37 +0800
Subject: [PATCH] 工作流大模型输出用#

---
 JAVA/SMTAIServer/src/main/java/com/smtaiserver/smtaiserver/javaai/jsonflow/node/SMTJsonFlowNodeLLM.java |   14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)

diff --git a/JAVA/SMTAIServer/src/main/java/com/smtaiserver/smtaiserver/javaai/jsonflow/node/SMTJsonFlowNodeLLM.java b/JAVA/SMTAIServer/src/main/java/com/smtaiserver/smtaiserver/javaai/jsonflow/node/SMTJsonFlowNodeLLM.java
index 7eea6df..b308b68 100644
--- a/JAVA/SMTAIServer/src/main/java/com/smtaiserver/smtaiserver/javaai/jsonflow/node/SMTJsonFlowNodeLLM.java
+++ b/JAVA/SMTAIServer/src/main/java/com/smtaiserver/smtaiserver/javaai/jsonflow/node/SMTJsonFlowNodeLLM.java
@@ -59,9 +59,17 @@
 			String realPrompt = SMTAIServerApp.getApp().convGlobalMacroString(_prompt, execArg._jsonArgs);
 			execArg._tranReq.traceLLMDebug("call llm : " +_llmId + "\n" + realPrompt);
 			
-			String result = llm.callWithMessage(null, realPrompt, execArg._tranReq);
-			execArg._tranReq.traceLLMDebug("call llm result:\n" + result);
-			execArg._jsonArgs.set(_argKey, result);
+			boolean orgSet = execArg._tranReq.setSendStarStream(true);
+			try
+			{
+				String result = llm.callWithMessage(null, realPrompt, execArg._tranReq);
+				execArg._tranReq.traceLLMDebug("call llm result:\n" + result);
+				execArg._jsonArgs.set(_argKey, result);
+			}
+			finally
+			{
+				execArg._tranReq.setSendStarStream(orgSet);
+			}
 		}
 		finally
 		{

--
Gitblit v1.9.3