
Commit 882d577

feat: support adding node context to the workflow AI chat node (#1791)
(cherry picked from commit f65546a)
1 parent: 960132a · commit: 882d577

5 files changed (+49, −7 lines)


apps/application/flow/step_node/ai_chat_step_node/i_chat_node.py

Lines changed: 3 additions & 0 deletions
@@ -26,6 +26,8 @@ class ChatNodeSerializer(serializers.Serializer):
 
     model_params_setting = serializers.DictField(required=False, error_messages=ErrMessage.integer("模型参数相关设置"))
 
+    dialogue_type = serializers.CharField(required=True, error_messages=ErrMessage.char("上下文类型"))
+
 
 class IChatNode(INode):
     type = 'ai-chat-node'
@@ -39,5 +41,6 @@ def _run(self):
     def execute(self, model_id, system, prompt, dialogue_number, history_chat_record, stream, chat_id,
                 chat_record_id,
                 model_params_setting=None,
+                dialogue_type=None,
                 **kwargs) -> NodeResult:
         pass
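
The interface change is small: one required serializer field plus a matching keyword on execute. Note that dialogue_type is declared as a plain CharField, so the two values the backend actually understands, 'NODE' and 'WORKFLOW' (see base_chat_node.py below), are a convention rather than an enforced choice. A minimal sketch, assuming a standard Django REST Framework setup, of what a stricter variant could look like (illustrative only, not part of the commit):

from rest_framework import serializers

class DialogueTypeExample(serializers.Serializer):
    # The commit uses CharField(required=True); a ChoiceField would also reject
    # values other than the two that the execute() implementation understands.
    dialogue_type = serializers.ChoiceField(choices=['NODE', 'WORKFLOW'], required=True)

# Run inside a configured Django project:
print(DialogueTypeExample(data={'dialogue_type': 'NODE'}).is_valid())     # True
print(DialogueTypeExample(data={'dialogue_type': 'SESSION'}).is_valid())  # False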

apps/application/flow/step_node/ai_chat_step_node/impl/base_chat_node.py

Lines changed: 25 additions & 4 deletions
@@ -12,7 +12,7 @@
 
 from django.db.models import QuerySet
 from langchain.schema import HumanMessage, SystemMessage
-from langchain_core.messages import BaseMessage
+from langchain_core.messages import BaseMessage, AIMessage
 
 from application.flow.i_step_node import NodeResult, INode
 from application.flow.step_node.ai_chat_step_node.i_chat_node import IChatNode
@@ -72,6 +72,22 @@ def get_default_model_params_setting(model_id):
     return model_params_setting
 
 
+def get_node_message(chat_record, runtime_node_id):
+    node_details = chat_record.get_node_details_runtime_node_id(runtime_node_id)
+    if node_details is None:
+        return []
+    return [HumanMessage(node_details.get('question')), AIMessage(node_details.get('answer'))]
+
+
+def get_workflow_message(chat_record):
+    return [chat_record.get_human_message(), chat_record.get_ai_message()]
+
+
+def get_message(chat_record, dialogue_type, runtime_node_id):
+    return get_node_message(chat_record, runtime_node_id) if dialogue_type == 'NODE' else get_workflow_message(
+        chat_record)
+
+
 class BaseChatNode(IChatNode):
     def save_context(self, details, workflow_manage):
         self.context['answer'] = details.get('answer')
@@ -80,12 +96,17 @@ def save_context(self, details, workflow_manage):
 
     def execute(self, model_id, system, prompt, dialogue_number, history_chat_record, stream, chat_id, chat_record_id,
                 model_params_setting=None,
+                dialogue_type=None,
                 **kwargs) -> NodeResult:
+        if dialogue_type is None:
+            dialogue_type = 'WORKFLOW'
+
         if model_params_setting is None:
             model_params_setting = get_default_model_params_setting(model_id)
         chat_model = get_model_instance_by_model_user_id(model_id, self.flow_params_serializer.data.get('user_id'),
                                                          **model_params_setting)
-        history_message = self.get_history_message(history_chat_record, dialogue_number)
+        history_message = self.get_history_message(history_chat_record, dialogue_number, dialogue_type,
+                                                   self.runtime_node_id)
         self.context['history_message'] = history_message
         question = self.generate_prompt_question(prompt)
         self.context['question'] = question.content
@@ -103,10 +124,10 @@ def execute(self, model_id, system, prompt, dialogue_number, history_chat_record
                           _write_context=write_context)
 
     @staticmethod
-    def get_history_message(history_chat_record, dialogue_number):
+    def get_history_message(history_chat_record, dialogue_number, dialogue_type, runtime_node_id):
         start_index = len(history_chat_record) - dialogue_number
         history_message = reduce(lambda x, y: [*x, *y], [
-            [history_chat_record[index].get_human_message(), history_chat_record[index].get_ai_message()]
+            get_message(history_chat_record[index], dialogue_type, runtime_node_id)
             for index in
             range(start_index if start_index > 0 else 0, len(history_chat_record))], [])
         return history_message
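
The heart of the change is the get_message selector: with dialogue_type == 'NODE' the history is rebuilt from this node's own question/answer pair stored in each chat record's details under its runtime_node_id, while any other value keeps the previous whole-conversation behaviour. A self-contained sketch of that selection, using a stand-in record object instead of the real ChatRecord model (the helper names and the details layout mirror the diff; the stand-in's attribute names and sample data are assumptions):

from functools import reduce
from langchain_core.messages import AIMessage, HumanMessage


class FakeChatRecord:
    """Stand-in for application.models.ChatRecord, just enough for this demo."""

    def __init__(self, question, answer, details):
        self.question = question  # the user's question for the whole round
        self.answer = answer      # the workflow's final answer
        self.details = details    # {runtime_node_id: {'question': ..., 'answer': ...}}

    def get_human_message(self):
        return HumanMessage(content=self.question)

    def get_ai_message(self):
        return AIMessage(content=self.answer)

    def get_node_details_runtime_node_id(self, runtime_node_id):
        return self.details.get(runtime_node_id, None)


# Same shape as the module-level helpers added in base_chat_node.py.
def get_node_message(chat_record, runtime_node_id):
    node_details = chat_record.get_node_details_runtime_node_id(runtime_node_id)
    if node_details is None:
        return []
    return [HumanMessage(node_details.get('question')), AIMessage(node_details.get('answer'))]


def get_workflow_message(chat_record):
    return [chat_record.get_human_message(), chat_record.get_ai_message()]


def get_message(chat_record, dialogue_type, runtime_node_id):
    return get_node_message(chat_record, runtime_node_id) if dialogue_type == 'NODE' else get_workflow_message(chat_record)


records = [
    FakeChatRecord('First question', 'Final answer 1',
                   {'node-1': {'question': 'prompt sent to node-1', 'answer': 'node-1 answer'}}),
    FakeChatRecord('Second question', 'Final answer 2', {}),  # node-1 did not run this round
]

node_history = reduce(lambda x, y: [*x, *y], [get_message(r, 'NODE', 'node-1') for r in records], [])
workflow_history = reduce(lambda x, y: [*x, *y], [get_message(r, 'WORKFLOW', 'node-1') for r in records], [])
print(len(node_history), len(workflow_history))  # 2 4

One consequence worth noting: with 'NODE' selected, rounds in which the node never executed contribute nothing to the history, so the effective context window can be shorter than dialogue_number.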

apps/application/models/application.py

Lines changed: 3 additions & 0 deletions
@@ -167,5 +167,8 @@ def get_human_message(self):
     def get_ai_message(self):
         return AIMessage(content=self.answer_text)
 
+    def get_node_details_runtime_node_id(self, runtime_node_id):
+        return self.details.get(runtime_node_id, None)
+
     class Meta:
         db_table = "application_chat_record"
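
get_node_details_runtime_node_id only works if ChatRecord.details is keyed by each node's runtime_node_id and every entry carries at least 'question' and 'answer', since those are the keys get_node_message reads. A sketch of the assumed shape (illustrative values, not taken from the commit):

details = {
    'a1b2c3d4': {  # runtime_node_id of the AI chat node
        'question': 'prompt that was sent to the model in this round',
        'answer': 'what the node answered',
        # ...whatever else the workflow stores per node execution
    },
    # Nodes that did not run in this round are simply absent, which is why the
    # helper defaults to None and get_node_message then returns [].
}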

ui/src/workflow/common/NodeContainer.vue

Lines changed: 7 additions & 2 deletions
@@ -93,9 +93,8 @@
       v-if="showAnchor"
       @mousemove.stop
       @mousedown.stop
-      @keydown.stop
       @click.stop
-      @wheel.stop
+      @wheel="handleWheel"
       :show="showAnchor"
       :id="id"
       style="left: 100%; top: 50%; transform: translate(0, -50%)"
@@ -142,6 +141,12 @@ const showNode = computed({
     return true
   }
 })
+const handleWheel = (event: any) => {
+  const isCombinationKeyPressed = event.ctrlKey || event.metaKey
+  if (!isCombinationKeyPressed) {
+    event.stopPropagation()
+  }
+}
 const node_status = computed(() => {
   if (props.nodeModel.properties.status) {
     return props.nodeModel.properties.status

ui/src/workflow/nodes/ai-chat-node/index.vue

Lines changed: 11 additions & 1 deletion
@@ -148,6 +148,15 @@
         />
       </el-form-item>
       <el-form-item label="历史聊天记录">
+        <template #label>
+          <div class="flex-between">
+            <div>历史聊天记录</div>
+            <el-select v-model="chat_data.dialogue_type" type="small" style="width: 100px">
+              <el-option label="节点" value="NODE" />
+              <el-option label="工作流" value="WORKFLOW" />
+            </el-select>
+          </div>
+        </template>
         <el-input-number
           v-model="chat_data.dialogue_number"
           :min="0"
@@ -246,7 +255,8 @@ const form = {
   dialogue_number: 1,
   is_result: false,
   temperature: null,
-  max_tokens: null
+  max_tokens: null,
+  dialogue_type: 'WORKFLOW'
 }
 
 const chat_data = computed({
