urbanLifeline/dify/api/tests/fixtures/workflow/test-answer-order.yml

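# Chatflow fixture with two chained Answer nodes: Start -> LLM -> Answer -> Answer 2.
# The static "--- answer 1 ---" text should be emitted before the LLM-backed "--- answer 2 ---" output.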
app:
  description: 'this is a chatflow with 2 answer nodes.

    its outputs should look like:

    ```

    --- answer 1 ---

    foo

    --- answer 2 ---

    <llm''s outputs>

    ```'
  icon: 🤖
  icon_background: '#FFEAD5'
  mode: advanced-chat
  name: test-answer-order
  use_icon_as_answer_icon: false
dependencies:
- current_identifier: null
  type: marketplace
  value:
    marketplace_plugin_unique_identifier: langgenius/openai:0.2.6@e2665624a156f52160927bceac9e169bd7e5ae6b936ae82575e14c90af390e6e
  version: null
kind: app
version: 0.4.0
workflow:
  conversation_variables: []
  environment_variables: []
  features:
    file_upload:
      allowed_file_extensions:
      - .JPG
      - .JPEG
      - .PNG
      - .GIF
      - .WEBP
      - .SVG
      allowed_file_types:
      - image
      allowed_file_upload_methods:
      - local_file
      - remote_url
      enabled: false
      fileUploadConfig:
        audio_file_size_limit: 50
        batch_count_limit: 5
        file_size_limit: 15
        image_file_size_limit: 10
        video_file_size_limit: 100
        workflow_file_upload_limit: 10
      image:
        enabled: false
        number_limits: 3
        transfer_methods:
        - local_file
        - remote_url
      number_limits: 3
    opening_statement: ''
    retriever_resource:
      enabled: true
    sensitive_word_avoidance:
      enabled: false
    speech_to_text:
      enabled: false
    suggested_questions: []
    suggested_questions_after_answer:
      enabled: false
    text_to_speech:
      enabled: false
      language: ''
      voice: ''
  graph:
    edges:
    - data:
        isInIteration: false
        isInLoop: false
        sourceType: answer
        targetType: answer
      id: 1759052466526-source-1759052469368-target
      source: '1759052466526'
      sourceHandle: source
      target: '1759052469368'
      targetHandle: target
      type: custom
      zIndex: 0
    - data:
        isInIteration: false
        isInLoop: false
        sourceType: start
        targetType: llm
      id: 1759052439553-source-1759052580454-target
      source: '1759052439553'
      sourceHandle: source
      target: '1759052580454'
      targetHandle: target
      type: custom
      zIndex: 0
    - data:
        isInIteration: false
        isInLoop: false
        sourceType: llm
        targetType: answer
      id: 1759052580454-source-1759052466526-target
      source: '1759052580454'
      sourceHandle: source
      target: '1759052466526'
      targetHandle: target
      type: custom
      zIndex: 0
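    # Node definitions: Start, Answer (static "answer 1" text), Answer 2 (renders the LLM output
    # via {{#1759052580454.text#}}), and the gpt-4o LLM node.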
    nodes:
    - data:
        selected: false
        title: Start
        type: start
        variables: []
      height: 52
      id: '1759052439553'
      position:
        x: 30
        y: 242
      positionAbsolute:
        x: 30
        y: 242
      sourcePosition: right
      targetPosition: left
      type: custom
      width: 242
    - data:
        answer: '--- answer 1 ---

          foo

          '
        selected: false
        title: Answer
        type: answer
        variables: []
      height: 100
      id: '1759052466526'
      position:
        x: 632
        y: 242
      positionAbsolute:
        x: 632
        y: 242
      selected: true
      sourcePosition: right
      targetPosition: left
      type: custom
      width: 242
    - data:
        answer: '--- answer 2 ---

          {{#1759052580454.text#}}

          '
        selected: false
        title: Answer 2
        type: answer
        variables: []
      height: 103
      id: '1759052469368'
      position:
        x: 934
        y: 242
      positionAbsolute:
        x: 934
        y: 242
      selected: false
      sourcePosition: right
      targetPosition: left
      type: custom
      width: 242
    - data:
        context:
          enabled: false
          variable_selector: []
        model:
          completion_params:
            temperature: 0.7
          mode: chat
          name: gpt-4o
          provider: langgenius/openai/openai
        prompt_template:
        - id: 5c1d873b-06b2-4dce-939e-672882bbd7c0
          role: system
          text: ''
        - role: user
          text: '{{#sys.query#}}'
        selected: false
        title: LLM
        type: llm
        vision:
          enabled: false
      height: 88
      id: '1759052580454'
      position:
        x: 332
        y: 242
      positionAbsolute:
        x: 332
        y: 242
      selected: false
      sourcePosition: right
      targetPosition: left
      type: custom
      width: 242
    viewport:
      x: 126.2797574512839
      y: 289.55932160537446
      zoom: 1.0743222672006216
  rag_pipeline_variables: []