{
    "description": "Use Anthropic Claude with 200k context window to ingest whole document for QnA",
    "categories": "Buffer Memory,Prompt Template,Conversation Chain,ChatAnthropic,Langchain",
    "framework": "Langchain",
    "nodes": [
        {
            "width": 300,
            "height": 376,
            "id": "bufferMemory_0",
            "position": {
                "x": 240.5161028076149,
                "y": 165.35849026339048
            },
            "type": "customNode",
            "data": {
                "id": "bufferMemory_0",
                "label": "Buffer Memory",
                "version": 2,
                "name": "bufferMemory",
                "type": "BufferMemory",
                "baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"],
                "category": "Memory",
                "description": "Retrieve chat messages stored in database",
                "inputParams": [
                    {
                        "label": "Session Id",
                        "name": "sessionId",
                        "type": "string",
                        "description": "If not specified, a random id will be used. Learn <a target=\"_blank\" href=\"https://docs.flowiseai.com/memory#ui-and-embedded-chat\">more</a>",
                        "default": "",
                        "additionalParams": true,
                        "optional": true,
                        "id": "bufferMemory_0-input-sessionId-string"
                    },
                    {
                        "label": "Memory Key",
                        "name": "memoryKey",
                        "type": "string",
                        "default": "chat_history",
                        "additionalParams": true,
                        "id": "bufferMemory_0-input-memoryKey-string"
                    }
                ],
                "inputAnchors": [],
                "inputs": {
                    "sessionId": "",
                    "memoryKey": "chat_history"
                },
                "outputAnchors": [
                    {
                        "id": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory",
                        "name": "bufferMemory",
                        "label": "BufferMemory",
                        "type": "BufferMemory | BaseChatMemory | BaseMemory"
                    }
                ],
                "outputs": {},
                "selected": false
            },
            "selected": false,
            "positionAbsolute": {
                "x": 240.5161028076149,
                "y": 165.35849026339048
            },
            "dragging": false
        },
        {
            "width": 300,
            "height": 383,
            "id": "conversationChain_0",
            "position": {
                "x": 958.9887390513221,
                "y": 318.8734467468765
            },
            "type": "customNode",
            "data": {
                "id": "conversationChain_0",
                "label": "Conversation Chain",
                "version": 3,
                "name": "conversationChain",
                "type": "ConversationChain",
                "baseClasses": ["ConversationChain", "LLMChain", "BaseChain", "Runnable"],
                "category": "Chains",
                "description": "Chat models specific conversational chain with memory",
                "inputParams": [
                    {
                        "label": "System Message",
                        "name": "systemMessagePrompt",
                        "type": "string",
                        "rows": 4,
                        "description": "If Chat Prompt Template is provided, this will be ignored",
                        "additionalParams": true,
                        "optional": true,
                        "default": "The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.",
                        "placeholder": "The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.",
                        "id": "conversationChain_0-input-systemMessagePrompt-string"
                    }
                ],
                "inputAnchors": [
                    {
                        "label": "Chat Model",
                        "name": "model",
                        "type": "BaseChatModel",
                        "id": "conversationChain_0-input-model-BaseChatModel"
                    },
                    {
                        "label": "Memory",
                        "name": "memory",
                        "type": "BaseMemory",
                        "id": "conversationChain_0-input-memory-BaseMemory"
                    },
                    {
                        "label": "Chat Prompt Template",
                        "name": "chatPromptTemplate",
                        "type": "ChatPromptTemplate",
                        "description": "Override existing prompt with Chat Prompt Template. Human Message must includes {input} variable",
                        "optional": true,
                        "id": "conversationChain_0-input-chatPromptTemplate-ChatPromptTemplate"
                    },
                    {
                        "label": "Input Moderation",
                        "description": "Detect text that could generate harmful output and prevent it from being sent to the language model",
                        "name": "inputModeration",
                        "type": "Moderation",
                        "optional": true,
                        "list": true,
                        "id": "conversationChain_0-input-inputModeration-Moderation"
                    }
                ],
                "inputs": {
                    "inputModeration": "",
                    "model": "{{chatAnthropic_0.data.instance}}",
                    "memory": "{{bufferMemory_0.data.instance}}",
                    "chatPromptTemplate": "{{chatPromptTemplate_0.data.instance}}",
                    "systemMessagePrompt": "The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know."
                },
                "outputAnchors": [
                    {
                        "id": "conversationChain_0-output-conversationChain-ConversationChain|LLMChain|BaseChain|Runnable",
                        "name": "conversationChain",
                        "label": "ConversationChain",
                        "type": "ConversationChain | LLMChain | BaseChain | Runnable"
                    }
                ],
                "outputs": {},
                "selected": false
            },
            "selected": false,
            "positionAbsolute": {
                "x": 958.9887390513221,
                "y": 318.8734467468765
            },
            "dragging": false
        },
        {
            "width": 300,
            "height": 574,
            "id": "chatAnthropic_0",
            "position": {
                "x": 585.3308245972187,
                "y": -116.32789506560908
            },
            "type": "customNode",
            "data": {
                "id": "chatAnthropic_0",
                "label": "ChatAnthropic",
                "version": 6.0,
                "name": "chatAnthropic",
                "type": "ChatAnthropic",
                "baseClasses": ["ChatAnthropic", "BaseChatModel", "BaseLanguageModel", "Runnable"],
                "category": "Chat Models",
                "description": "Wrapper around ChatAnthropic large language models that use the Chat endpoint",
                "inputParams": [
                    {
                        "label": "Connect Credential",
                        "name": "credential",
                        "type": "credential",
                        "credentialNames": ["anthropicApi"],
                        "id": "chatAnthropic_0-input-credential-credential"
                    },
                    {
                        "label": "Model Name",
                        "name": "modelName",
                        "type": "asyncOptions",
                        "loadMethod": "listModels",
                        "default": "claude-3-haiku",
                        "id": "chatAnthropic_0-input-modelName-options"
                    },
                    {
                        "label": "Temperature",
                        "name": "temperature",
                        "type": "number",
                        "step": 0.1,
                        "default": 0.9,
                        "optional": true,
                        "id": "chatAnthropic_0-input-temperature-number"
                    },
                    {
                        "label": "Max Tokens",
                        "name": "maxTokensToSample",
                        "type": "number",
                        "step": 1,
                        "optional": true,
                        "additionalParams": true,
                        "id": "chatAnthropic_0-input-maxTokensToSample-number"
                    },
                    {
                        "label": "Top P",
                        "name": "topP",
                        "type": "number",
                        "step": 0.1,
                        "optional": true,
                        "additionalParams": true,
                        "id": "chatAnthropic_0-input-topP-number"
                    },
                    {
                        "label": "Top K",
                        "name": "topK",
                        "type": "number",
                        "step": 0.1,
                        "optional": true,
                        "additionalParams": true,
                        "id": "chatAnthropic_0-input-topK-number"
                    },
                    {
                        "label": "Allow Image Uploads",
                        "name": "allowImageUploads",
                        "type": "boolean",
                        "description": "Automatically uses claude-3-* models when image is being uploaded from chat. Only works with LLMChain, Conversation Chain, ReAct Agent, and Conversational Agent",
                        "default": false,
                        "optional": true,
                        "id": "chatAnthropic_0-input-allowImageUploads-boolean"
                    }
                ],
                "inputAnchors": [
                    {
                        "label": "Cache",
                        "name": "cache",
                        "type": "BaseCache",
                        "optional": true,
                        "id": "chatAnthropic_0-input-cache-BaseCache"
                    }
                ],
                "inputs": {
                    "cache": "",
                    "modelName": "claude-3-haiku",
                    "temperature": 0.9,
                    "maxTokensToSample": "",
                    "topP": "",
                    "topK": "",
                    "allowImageUploads": true
                },
                "outputAnchors": [
                    {
                        "id": "chatAnthropic_0-output-chatAnthropic-ChatAnthropic|BaseChatModel|BaseLanguageModel|Runnable",
                        "name": "chatAnthropic",
                        "label": "ChatAnthropic",
                        "type": "ChatAnthropic | BaseChatModel | BaseLanguageModel | Runnable"
                    }
                ],
                "outputs": {},
                "selected": false
            },
            "selected": false,
            "positionAbsolute": {
                "x": 585.3308245972187,
                "y": -116.32789506560908
            },
            "dragging": false
        },
        {
            "width": 300,
            "height": 688,
            "id": "chatPromptTemplate_0",
            "position": {
                "x": -106.44189698270114,
                "y": 20.133956087516538
            },
            "type": "customNode",
            "data": {
                "id": "chatPromptTemplate_0",
                "label": "Chat Prompt Template",
                "version": 1,
                "name": "chatPromptTemplate",
                "type": "ChatPromptTemplate",
                "baseClasses": ["ChatPromptTemplate", "BaseChatPromptTemplate", "BasePromptTemplate", "Runnable"],
                "category": "Prompts",
                "description": "Schema to represent a chat prompt",
                "inputParams": [
                    {
                        "label": "System Message",
                        "name": "systemMessagePrompt",
                        "type": "string",
                        "rows": 4,
                        "placeholder": "You are a helpful assistant that translates {input_language} to {output_language}.",
                        "id": "chatPromptTemplate_0-input-systemMessagePrompt-string"
                    },
                    {
                        "label": "Human Message",
                        "name": "humanMessagePrompt",
                        "type": "string",
                        "rows": 4,
                        "placeholder": "{text}",
                        "id": "chatPromptTemplate_0-input-humanMessagePrompt-string"
                    },
                    {
                        "label": "Format Prompt Values",
                        "name": "promptValues",
                        "type": "json",
                        "optional": true,
                        "acceptVariable": true,
                        "list": true,
                        "id": "chatPromptTemplate_0-input-promptValues-json"
                    }
                ],
                "inputAnchors": [],
                "inputs": {
                    "systemMessagePrompt": "The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.\nThe AI has the following context:\n{context}",
                    "humanMessagePrompt": "{input}",
                    "promptValues": "{\"context\":\"{{plainText_0.data.instance}}\",\"input\":\"{{question}}\"}"
                },
                "outputAnchors": [
                    {
                        "id": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable",
                        "name": "chatPromptTemplate",
                        "label": "ChatPromptTemplate",
                        "type": "ChatPromptTemplate | BaseChatPromptTemplate | BasePromptTemplate | Runnable"
                    }
                ],
                "outputs": {},
                "selected": false
            },
            "selected": false,
            "positionAbsolute": {
                "x": -106.44189698270114,
                "y": 20.133956087516538
            },
            "dragging": false
        },
        {
            "width": 300,
            "height": 485,
            "id": "plainText_0",
            "position": {
                "x": -487.7511991135089,
                "y": 77.83838996645807
            },
            "type": "customNode",
            "data": {
                "id": "plainText_0",
                "label": "Plain Text",
                "version": 2,
                "name": "plainText",
                "type": "Document",
                "baseClasses": ["Document"],
                "category": "Document Loaders",
                "description": "Load data from plain text",
                "inputParams": [
                    {
                        "label": "Text",
                        "name": "text",
                        "type": "string",
                        "rows": 4,
                        "placeholder": "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua...",
                        "id": "plainText_0-input-text-string"
                    },
                    {
                        "label": "Metadata",
                        "name": "metadata",
                        "type": "json",
                        "optional": true,
                        "additionalParams": true,
                        "id": "plainText_0-input-metadata-json"
                    }
                ],
                "inputAnchors": [
                    {
                        "label": "Text Splitter",
                        "name": "textSplitter",
                        "type": "TextSplitter",
                        "optional": true,
                        "id": "plainText_0-input-textSplitter-TextSplitter"
                    }
                ],
                "inputs": {
                    "text": "Welcome to Skyworld Hotel, where your dreams take flight and your stay soars to new heights. Nestled amidst breathtaking cityscape views, our upscale establishment offers an unparalleled blend of luxury and comfort. Our rooms are elegantly appointed, featuring modern amenities and plush furnishings to ensure your relaxation.\n\nIndulge in culinary delights at our rooftop restaurant, offering a gastronomic journey with panoramic vistas. Skyworld Hotel boasts state-of-the-art conference facilities, perfect for business travelers, and an inviting spa for relaxation seekers. Our attentive staff is dedicated to ensuring your every need is met, making your stay memorable.\n\nCentrally located, we offer easy access to local attractions, making us an ideal choice for both leisure and business travelers. Experience the world of hospitality like never before at Skyworld Hotel.",
                    "textSplitter": "",
                    "metadata": ""
                },
                "outputAnchors": [
                    {
                        "name": "output",
                        "label": "Output",
                        "type": "options",
                        "options": [
                            {
                                "id": "plainText_0-output-document-Document|json",
                                "name": "document",
                                "label": "Document",
                                "type": "Document | json"
                            },
                            {
                                "id": "plainText_0-output-text-string|json",
                                "name": "text",
                                "label": "Text",
                                "type": "string | json"
                            }
                        ],
                        "default": "document"
                    }
                ],
                "outputs": {
                    "output": "text"
                },
                "selected": false
            },
            "selected": false,
            "positionAbsolute": {
                "x": -487.7511991135089,
                "y": 77.83838996645807
            },
            "dragging": false
        }
    ],
    "edges": [
        {
            "source": "bufferMemory_0",
            "sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory",
            "target": "conversationChain_0",
            "targetHandle": "conversationChain_0-input-memory-BaseMemory",
            "type": "buttonedge",
            "id": "bufferMemory_0-bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-conversationChain_0-conversationChain_0-input-memory-BaseMemory"
        },
        {
            "source": "chatAnthropic_0",
            "sourceHandle": "chatAnthropic_0-output-chatAnthropic-ChatAnthropic|BaseChatModel|BaseLanguageModel|Runnable",
            "target": "conversationChain_0",
            "targetHandle": "conversationChain_0-input-model-BaseChatModel",
            "type": "buttonedge",
            "id": "chatAnthropic_0-chatAnthropic_0-output-chatAnthropic-ChatAnthropic|BaseChatModel|BaseLanguageModel|Runnable-conversationChain_0-conversationChain_0-input-model-BaseChatModel"
        },
        {
            "source": "plainText_0",
            "sourceHandle": "plainText_0-output-text-string|json",
            "target": "chatPromptTemplate_0",
            "targetHandle": "chatPromptTemplate_0-input-promptValues-json",
            "type": "buttonedge",
            "id": "plainText_0-plainText_0-output-text-string|json-chatPromptTemplate_0-chatPromptTemplate_0-input-promptValues-json"
        },
        {
            "source": "chatPromptTemplate_0",
            "sourceHandle": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable",
            "target": "conversationChain_0",
            "targetHandle": "conversationChain_0-input-chatPromptTemplate-ChatPromptTemplate",
            "type": "buttonedge",
            "id": "chatPromptTemplate_0-chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable-conversationChain_0-conversationChain_0-input-chatPromptTemplate-ChatPromptTemplate"
        }
    ]
}