Flowise

{
    "description": "Use output from a chain as prompt for another chain",
    "categories": "Custom Tool,OpenAI,LLM Chain,Langchain",
    "framework": "Langchain",
    "nodes": [
        {
            "width": 300,
            "height": 475,
            "id": "promptTemplate_0",
            "position": {
                "x": 792.9464838535649,
                "y": 527.1718536712464
            },
            "type": "customNode",
            "data": {
                "id": "promptTemplate_0",
                "label": "Prompt Template",
                "version": 1,
                "name": "promptTemplate",
                "type": "PromptTemplate",
                "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
                "category": "Prompts",
                "description": "Schema to represent a basic prompt for an LLM",
                "inputParams": [
                    {
                        "label": "Template",
                        "name": "template",
                        "type": "string",
                        "rows": 4,
                        "placeholder": "What is a good name for a company that makes {product}?",
                        "id": "promptTemplate_0-input-template-string"
                    },
                    {
                        "label": "Format Prompt Values",
                        "name": "promptValues",
                        "type": "json",
                        "optional": true,
                        "acceptVariable": true,
                        "list": true,
                        "id": "promptTemplate_0-input-promptValues-json"
                    }
                ],
                "inputAnchors": [],
                "inputs": {
                    "template": "You are an AI who performs one task based on the following objective: {objective}.\nRespond with how you would complete this task:",
                    "promptValues": "{\"objective\":\"{{question}}\"}"
                },
                "outputAnchors": [
                    {
                        "id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
                        "name": "promptTemplate",
                        "label": "PromptTemplate",
                        "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
                    }
                ],
                "outputs": {},
                "selected": false
            },
            "selected": false,
            "positionAbsolute": {
                "x": 792.9464838535649,
                "y": 527.1718536712464
            },
            "dragging": false
        },
        {
            "width": 300,
            "height": 475,
            "id": "promptTemplate_1",
            "position": {
                "x": 1571.0896874449775,
                "y": 522.8455116403258
            },
            "type": "customNode",
            "data": {
                "id": "promptTemplate_1",
                "label": "Prompt Template",
                "version": 1,
                "name": "promptTemplate",
                "type": "PromptTemplate",
                "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
                "category": "Prompts",
                "description": "Schema to represent a basic prompt for an LLM",
                "inputParams": [
                    {
                        "label": "Template",
                        "name": "template",
                        "type": "string",
                        "rows": 4,
                        "placeholder": "What is a good name for a company that makes {product}?",
                        "id": "promptTemplate_1-input-template-string"
                    },
                    {
                        "label": "Format Prompt Values",
                        "name": "promptValues",
                        "type": "json",
                        "optional": true,
                        "acceptVariable": true,
                        "list": true,
                        "id": "promptTemplate_1-input-promptValues-json"
                    }
                ],
                "inputAnchors": [],
                "inputs": {
                    "template": "You are a task creation AI that uses the result of an execution agent to create new tasks with the following objective: {objective}.\nThe last completed task has the result: {result}.\nBased on the result, create new tasks to be completed by the AI system that do not overlap with result.\nReturn the tasks as an array.",
                    "promptValues": "{\"objective\":\"{{question}}\",\"result\":\"{{llmChain_0.data.instance}}\"}"
                },
                "outputAnchors": [
                    {
                        "id": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
                        "name": "promptTemplate",
                        "label": "PromptTemplate",
                        "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
                    }
                ],
                "outputs": {},
                "selected": false
            },
            "positionAbsolute": {
                "x": 1571.0896874449775,
                "y": 522.8455116403258
            },
            "selected": false,
            "dragging": false
        },
        {
            "width": 300,
            "height": 574,
            "id": "openAI_1",
            "position": {
                "x": 791.6102007244282,
                "y": -83.71386876566092
            },
            "type": "customNode",
            "data": {
                "id": "openAI_1",
                "label": "OpenAI",
                "version": 4.0,
                "name": "openAI",
                "type": "OpenAI",
                "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
                "category": "LLMs",
                "description": "Wrapper around OpenAI large language models",
                "inputParams": [
                    {
                        "label": "Connect Credential",
                        "name": "credential",
                        "type": "credential",
                        "credentialNames": ["openAIApi"],
                        "id": "openAI_1-input-credential-credential"
                    },
                    {
                        "label": "Model Name",
                        "name": "modelName",
                        "type": "asyncOptions",
                        "loadMethod": "listModels",
                        "default": "gpt-3.5-turbo-instruct",
                        "id": "openAI_1-input-modelName-options"
                    },
                    {
                        "label": "Temperature",
                        "name": "temperature",
                        "type": "number",
                        "default": 0.7,
                        "optional": true,
                        "id": "openAI_1-input-temperature-number"
                    },
                    {
                        "label": "Max Tokens",
                        "name": "maxTokens",
                        "type": "number",
                        "optional": true,
                        "additionalParams": true,
                        "id": "openAI_1-input-maxTokens-number"
                    },
                    {
                        "label": "Top Probability",
                        "name": "topP",
                        "type": "number",
                        "optional": true,
                        "additionalParams": true,
                        "id": "openAI_1-input-topP-number"
                    },
                    {
                        "label": "Best Of",
                        "name": "bestOf",
                        "type": "number",
                        "optional": true,
                        "additionalParams": true,
                        "id": "openAI_1-input-bestOf-number"
                    },
                    {
                        "label": "Frequency Penalty",
                        "name": "frequencyPenalty",
                        "type": "number",
                        "optional": true,
                        "additionalParams": true,
                        "id": "openAI_1-input-frequencyPenalty-number"
                    },
                    {
                        "label": "Presence Penalty",
                        "name": "presencePenalty",
                        "type": "number",
                        "optional": true,
                        "additionalParams": true,
                        "id": "openAI_1-input-presencePenalty-number"
                    },
                    {
                        "label": "Batch Size",
                        "name": "batchSize",
                        "type": "number",
                        "optional": true,
                        "additionalParams": true,
                        "id": "openAI_1-input-batchSize-number"
                    },
                    {
                        "label": "Timeout",
                        "name": "timeout",
                        "type": "number",
                        "optional": true,
                        "additionalParams": true,
                        "id": "openAI_1-input-timeout-number"
                    },
                    {
                        "label": "BasePath",
                        "name": "basepath",
                        "type": "string",
                        "optional": true,
                        "additionalParams": true,
                        "id": "openAI_1-input-basepath-string"
                    }
                ],
                "inputAnchors": [
                    {
                        "label": "Cache",
                        "name": "cache",
                        "type": "BaseCache",
                        "optional": true,
                        "id": "openAI_1-input-cache-BaseCache"
                    }
                ],
                "inputs": {
                    "modelName": "gpt-3.5-turbo-instruct",
                    "temperature": 0.7,
                    "maxTokens": "",
                    "topP": "",
                    "bestOf": "",
                    "frequencyPenalty": "",
                    "presencePenalty": "",
                    "batchSize": "",
                    "timeout": "",
                    "basepath": ""
                },
                "outputAnchors": [
                    {
                        "id": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
                        "name": "openAI",
                        "label": "OpenAI",
                        "type": "OpenAI | BaseLLM | BaseLanguageModel"
                    }
                ],
                "outputs": {},
                "selected": false
            },
            "selected": false,
            "positionAbsolute": {
                "x": 791.6102007244282,
                "y": -83.71386876566092
            },
            "dragging": false
        },
        {
            "width": 300,
            "height": 574,
            "id": "openAI_2",
            "position": {
                "x": 1571.148617508543,
                "y": -90.37243748117169
            },
            "type": "customNode",
            "data": {
                "id": "openAI_2",
                "label": "OpenAI",
                "version": 4.0,
                "name": "openAI",
                "type": "OpenAI",
                "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
                "category": "LLMs",
                "description": "Wrapper around OpenAI large language models",
                "inputParams": [
                    {
                        "label": "Connect Credential",
                        "name": "credential",
                        "type": "credential",
                        "credentialNames": ["openAIApi"],
                        "id": "openAI_2-input-credential-credential"
                    },
                    {
                        "label": "Model Name",
                        "name": "modelName",
                        "type": "asyncOptions",
                        "loadMethod": "listModels",
                        "default": "gpt-3.5-turbo-instruct",
                        "id": "openAI_2-input-modelName-options"
                    },
                    {
                        "label": "Temperature",
                        "name": "temperature",
                        "type": "number",
                        "default": 0.7,
                        "optional": true,
                        "id": "openAI_2-input-temperature-number"
                    },
                    {
                        "label": "Max Tokens",
                        "name": "maxTokens",
                        "type": "number",
                        "optional": true,
                        "additionalParams": true,
                        "id": "openAI_2-input-maxTokens-number"
                    },
                    {
                        "label": "Top Probability",
                        "name": "topP",
                        "type": "number",
                        "optional": true,
                        "additionalParams": true,
                        "id": "openAI_2-input-topP-number"
                    },
                    {
                        "label": "Best Of",
                        "name": "bestOf",
                        "type": "number",
                        "optional": true,
                        "additionalParams": true,
                        "id": "openAI_2-input-bestOf-number"
                    },
                    {
                        "label": "Frequency Penalty",
                        "name": "frequencyPenalty",
                        "type": "number",
                        "optional": true,
                        "additionalParams": true,
                        "id": "openAI_2-input-frequencyPenalty-number"
                    },
                    {
                        "label": "Presence Penalty",
                        "name": "presencePenalty",
                        "type": "number",
                        "optional": true,
                        "additionalParams": true,
                        "id": "openAI_2-input-presencePenalty-number"
                    },
                    {
                        "label": "Batch Size",
                        "name": "batchSize",
                        "type": "number",
                        "optional": true,
                        "additionalParams": true,
                        "id": "openAI_2-input-batchSize-number"
                    },
                    {
                        "label": "Timeout",
                        "name": "timeout",
                        "type": "number",
                        "optional": true,
                        "additionalParams": true,
                        "id": "openAI_2-input-timeout-number"
                    },
                    {
                        "label": "BasePath",
                        "name": "basepath",
                        "type": "string",
                        "optional": true,
                        "additionalParams": true,
                        "id": "openAI_2-input-basepath-string"
                    }
                ],
                "inputAnchors": [
                    {
                        "label": "Cache",
                        "name": "cache",
                        "type": "BaseCache",
                        "optional": true,
                        "id": "openAI_2-input-cache-BaseCache"
                    }
                ],
                "inputs": {
                    "modelName": "gpt-3.5-turbo-instruct",
                    "temperature": 0.7,
                    "maxTokens": "",
                    "topP": "",
                    "bestOf": "",
                    "frequencyPenalty": "",
                    "presencePenalty": "",
                    "batchSize": "",
                    "timeout": "",
                    "basepath": ""
                },
                "outputAnchors": [
                    {
                        "id": "openAI_2-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
                        "name": "openAI",
                        "label": "OpenAI",
                        "type": "OpenAI | BaseLLM | BaseLanguageModel"
                    }
                ],
                "outputs": {},
                "selected": false
            },
            "selected": false,
            "positionAbsolute": {
                "x": 1571.148617508543,
                "y": -90.37243748117169
            },
            "dragging": false
        },
        {
            "width": 300,
            "height": 456,
            "id": "llmChain_0",
            "position": {
                "x": 1183.0899727188096,
                "y": 385.0159960992951
            },
            "type": "customNode",
            "data": {
                "id": "llmChain_0",
                "label": "LLM Chain",
                "version": 3,
                "name": "llmChain",
                "type": "LLMChain",
                "baseClasses": ["LLMChain", "BaseChain", "Runnable"],
                "category": "Chains",
                "description": "Chain to run queries against LLMs",
                "inputParams": [
                    {
                        "label": "Chain Name",
                        "name": "chainName",
                        "type": "string",
                        "placeholder": "Name Your Chain",
                        "optional": true,
                        "id": "llmChain_0-input-chainName-string"
                    }
                ],
                "inputAnchors": [
                    {
                        "label": "Language Model",
                        "name": "model",
                        "type": "BaseLanguageModel",
                        "id": "llmChain_0-input-model-BaseLanguageModel"
                    },
                    {
                        "label": "Prompt",
                        "name": "prompt",
                        "type": "BasePromptTemplate",
                        "id": "llmChain_0-input-prompt-BasePromptTemplate"
                    },
                    {
                        "label": "Output Parser",
                        "name": "outputParser",
                        "type": "BaseLLMOutputParser",
                        "optional": true,
                        "id": "llmChain_0-input-outputParser-BaseLLMOutputParser"
                    },
                    {
                        "label": "Input Moderation",
                        "description": "Detect text that could generate harmful output and prevent it from being sent to the language model",
                        "name": "inputModeration",
                        "type": "Moderation",
                        "optional": true,
                        "list": true,
                        "id": "llmChain_0-input-inputModeration-Moderation"
                    }
                ],
                "inputs": {
                    "model": "{{openAI_1.data.instance}}",
                    "prompt": "{{promptTemplate_0.data.instance}}",
                    "outputParser": "",
                    "chainName": "FirstChain",
                    "inputModeration": ""
                },
                "outputAnchors": [
                    {
                        "name": "output",
                        "label": "Output",
                        "type": "options",
                        "options": [
                            {
                                "id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable",
                                "name": "llmChain",
                                "label": "LLM Chain",
                                "type": "LLMChain | BaseChain | Runnable"
                            },
                            {
                                "id": "llmChain_0-output-outputPrediction-string|json",
                                "name": "outputPrediction",
                                "label": "Output Prediction",
                                "type": "string | json"
                            }
                        ],
                        "default": "llmChain"
                    }
                ],
                "outputs": {
                    "output": "outputPrediction"
                },
                "selected": false
            },
            "selected": false,
            "positionAbsolute": {
                "x": 1183.0899727188096,
                "y": 385.0159960992951
            },
            "dragging": false
        },
        {
            "width": 300,
            "height": 456,
            "id": "llmChain_1",
            "position": {
                "x": 1973.883197748518,
                "y": 370.7937277714931
            },
            "type": "customNode",
            "data": {
                "id": "llmChain_1",
                "label": "LLM Chain",
                "version": 3,
                "name": "llmChain",
                "type": "LLMChain",
                "baseClasses": ["LLMChain", "BaseChain", "Runnable"],
                "category": "Chains",
                "description": "Chain to run queries against LLMs",
                "inputParams": [
                    {
                        "label": "Chain Name",
                        "name": "chainName",
                        "type": "string",
                        "placeholder": "Name Your Chain",
                        "optional": true,
                        "id": "llmChain_1-input-chainName-string"
                    }
                ],
                "inputAnchors": [
                    {
                        "label": "Language Model",
                        "name": "model",
                        "type": "BaseLanguageModel",
                        "id": "llmChain_1-input-model-BaseLanguageModel"
                    },
                    {
                        "label": "Prompt",
                        "name": "prompt",
                        "type": "BasePromptTemplate",
                        "id": "llmChain_1-input-prompt-BasePromptTemplate"
                    },
                    {
                        "label": "Output Parser",
                        "name": "outputParser",
                        "type": "BaseLLMOutputParser",
                        "optional": true,
                        "id": "llmChain_1-input-outputParser-BaseLLMOutputParser"
                    },
                    {
                        "label": "Input Moderation",
                        "description": "Detect text that could generate harmful output and prevent it from being sent to the language model",
                        "name": "inputModeration",
                        "type": "Moderation",
                        "optional": true,
                        "list": true,
                        "id": "llmChain_1-input-inputModeration-Moderation"
                    }
                ],
                "inputs": {
                    "model": "{{openAI_2.data.instance}}",
                    "prompt": "{{promptTemplate_1.data.instance}}",
                    "outputParser": "",
                    "chainName": "LastChain",
                    "inputModeration": ""
                },
                "outputAnchors": [
                    {
                        "name": "output",
                        "label": "Output",
                        "type": "options",
                        "options": [
                            {
                                "id": "llmChain_1-output-llmChain-LLMChain|BaseChain|Runnable",
                                "name": "llmChain",
                                "label": "LLM Chain",
                                "type": "LLMChain | BaseChain | Runnable"
                            },
                            {
                                "id": "llmChain_1-output-outputPrediction-string|json",
                                "name": "outputPrediction",
                                "label": "Output Prediction",
                                "type": "string | json"
                            }
                        ],
                        "default": "llmChain"
                    }
                ],
                "outputs": {
                    "output": "llmChain"
                },
                "selected": false
            },
            "selected": false,
            "positionAbsolute": {
                "x": 1973.883197748518,
                "y": 370.7937277714931
            },
            "dragging": false
        }
    ],
    "edges": [
        {
            "source": "openAI_1",
            "sourceHandle": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
            "target": "llmChain_0",
            "targetHandle": "llmChain_0-input-model-BaseLanguageModel",
            "type": "buttonedge",
            "id": "openAI_1-openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-llmChain_0-llmChain_0-input-model-BaseLanguageModel",
            "data": {
                "label": ""
            }
        },
        {
            "source": "promptTemplate_0",
            "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
            "target": "llmChain_0",
            "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate",
            "type": "buttonedge",
            "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate",
            "data": {
                "label": ""
            }
        },
        {
            "source": "llmChain_0",
            "sourceHandle": "llmChain_0-output-outputPrediction-string|json",
            "target": "promptTemplate_1",
            "targetHandle": "promptTemplate_1-input-promptValues-json",
            "type": "buttonedge",
            "id": "llmChain_0-llmChain_0-output-outputPrediction-string|json-promptTemplate_1-promptTemplate_1-input-promptValues-json",
            "data": {
                "label": ""
            }
        },
        {
            "source": "promptTemplate_1",
            "sourceHandle": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
            "target": "llmChain_1",
            "targetHandle": "llmChain_1-input-prompt-BasePromptTemplate",
            "type": "buttonedge",
            "id": "promptTemplate_1-promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate",
            "data": {
                "label": ""
            }
        },
        {
            "source": "openAI_2",
            "sourceHandle": "openAI_2-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
            "target": "llmChain_1",
            "targetHandle": "llmChain_1-input-model-BaseLanguageModel",
            "type": "buttonedge",
            "id": "openAI_2-openAI_2-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-llmChain_1-llmChain_1-input-model-BaseLanguageModel",
            "data": {
                "label": ""
            }
        }
    ]
}
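How the template works: the first chain ("FirstChain") expands the user's {{question}} through promptTemplate_0, and its output anchor is set to "Output Prediction" so the generated text is piped into promptTemplate_1 as the {result} variable; the second chain ("LastChain") then turns that result into a list of follow-up tasks. The sketch below shows one way to invoke the flow after importing it, using Flowise's prediction REST endpoint (POST /api/v1/prediction/<chatflowId>). The base URL, chatflow ID, and API key are placeholders for your own instance, and the response is assumed to carry the final chain's answer in its `text` field.

```typescript
// Minimal sketch: call the imported "Chain of Prompts" flow via the Flowise
// prediction endpoint. FLOWISE_URL, CHATFLOW_ID, and FLOWISE_API_KEY are
// placeholders (assumptions), not values from this template -- copy them from
// your own Flowise instance after importing the JSON above.

const FLOWISE_URL = "http://localhost:3000";
const CHATFLOW_ID = "<chatflow-id-after-import>";
const FLOWISE_API_KEY = ""; // leave empty if the chatflow is not protected

async function runChainOfPrompts(objective: string): Promise<string> {
    // `question` feeds the {{question}} variable used by both prompt templates.
    const response = await fetch(`${FLOWISE_URL}/api/v1/prediction/${CHATFLOW_ID}`, {
        method: "POST",
        headers: {
            "Content-Type": "application/json",
            ...(FLOWISE_API_KEY ? { Authorization: `Bearer ${FLOWISE_API_KEY}` } : {})
        },
        body: JSON.stringify({ question: objective })
    });

    if (!response.ok) {
        throw new Error(`Flowise request failed: ${response.status} ${response.statusText}`);
    }

    // The task list produced by the final chain (llmChain_1, "LastChain") is
    // expected in the `text` field of the prediction response.
    const { text } = await response.json();
    return text;
}

// Example usage: the objective becomes the {objective} variable of both prompts.
runChainOfPrompts("Plan a launch checklist for a small web service")
    .then((tasks) => console.log(tasks))
    .catch((err) => console.error(err));
```

Note the design choice in the flow itself: llmChain_0 exposes "Output Prediction" instead of the chain instance, which is what allows its raw text to be referenced as {{llmChain_0.data.instance}} inside promptTemplate_1's prompt values.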
