1
{
2
    "description": "Split flows based on if else condition",
3
    "categories": "IfElse Function,ChatOpenAI,OpenAI,LLM Chain,Langchain",
4
    "framework": "Langchain",
5
    "badge": "new",
6
    "nodes": [
7
        {
8
            "width": 300,
9
            "height": 511,
10
            "id": "promptTemplate_0",
11
            "position": {
12
                "x": 792.9464838535649,
13
                "y": 527.1718536712464
14
            },
15
            "type": "customNode",
16
            "data": {
17
                "id": "promptTemplate_0",
18
                "label": "Prompt Template",
19
                "version": 1,
20
                "name": "promptTemplate",
21
                "type": "PromptTemplate",
22
                "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
23
                "category": "Prompts",
24
                "description": "Schema to represent a basic prompt for an LLM",
25
                "inputParams": [
26
                    {
27
                        "label": "Template",
28
                        "name": "template",
29
                        "type": "string",
30
                        "rows": 4,
31
                        "placeholder": "What is a good name for a company that makes {product}?",
32
                        "id": "promptTemplate_0-input-template-string"
33
                    },
34
                    {
35
                        "label": "Format Prompt Values",
36
                        "name": "promptValues",
37
                        "type": "json",
38
                        "optional": true,
39
                        "acceptVariable": true,
40
                        "list": true,
41
                        "id": "promptTemplate_0-input-promptValues-json"
42
                    }
43
                ],
44
                "inputAnchors": [],
45
                "inputs": {
46
                    "template": "You are an AI who performs one task based on the following objective: {objective}.\nRespond with how you would complete this task:",
47
                    "promptValues": "{\"objective\":\"{{question}}\"}"
48
                },
49
                "outputAnchors": [
50
                    {
51
                        "id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
52
                        "name": "promptTemplate",
53
                        "label": "PromptTemplate",
54
                        "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
55
                    }
56
                ],
57
                "outputs": {},
58
                "selected": false
59
            },
60
            "selected": false,
61
            "positionAbsolute": {
62
                "x": 792.9464838535649,
63
                "y": 527.1718536712464
64
            },
65
            "dragging": false
66
        },
67
        {
68
            "width": 300,
69
            "height": 511,
70
            "id": "promptTemplate_1",
71
            "position": {
72
                "x": 1995.1328578238122,
73
                "y": -14.648035759690174
74
            },
75
            "type": "customNode",
76
            "data": {
77
                "id": "promptTemplate_1",
78
                "label": "Prompt Template",
79
                "version": 1,
80
                "name": "promptTemplate",
81
                "type": "PromptTemplate",
82
                "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
83
                "category": "Prompts",
84
                "description": "Schema to represent a basic prompt for an LLM",
85
                "inputParams": [
86
                    {
87
                        "label": "Template",
88
                        "name": "template",
89
                        "type": "string",
90
                        "rows": 4,
91
                        "placeholder": "What is a good name for a company that makes {product}?",
92
                        "id": "promptTemplate_1-input-template-string"
93
                    },
94
                    {
95
                        "label": "Format Prompt Values",
96
                        "name": "promptValues",
97
                        "type": "json",
98
                        "optional": true,
99
                        "acceptVariable": true,
100
                        "list": true,
101
                        "id": "promptTemplate_1-input-promptValues-json"
102
                    }
103
                ],
104
                "inputAnchors": [],
105
                "inputs": {
106
                    "template": "You are a task creation AI that uses the result of an execution agent to create new tasks with the following objective: {objective}.\nThe last completed task has the result: {result}.\nBased on the result, create new tasks to be completed by the AI system that do not overlap with result.\nReturn the tasks as an array.",
107
                    "promptValues": "{\"objective\":\"{{question}}\",\"result\":\"{{ifElseFunction_0.data.instance}}\"}"
108
                },
109
                "outputAnchors": [
110
                    {
111
                        "id": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
112
                        "name": "promptTemplate",
113
                        "label": "PromptTemplate",
114
                        "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
115
                    }
116
                ],
117
                "outputs": {},
118
                "selected": false
119
            },
120
            "positionAbsolute": {
121
                "x": 1995.1328578238122,
122
                "y": -14.648035759690174
123
            },
124
            "selected": false,
125
            "dragging": false
126
        },
127
        {
128
            "width": 300,
129
            "height": 574,
130
            "id": "openAI_1",
131
            "position": {
132
                "x": 791.6102007244282,
133
                "y": -83.71386876566092
134
            },
135
            "type": "customNode",
136
            "data": {
137
                "id": "openAI_1",
138
                "label": "OpenAI",
139
                "version": 4.0,
140
                "name": "openAI",
141
                "type": "OpenAI",
142
                "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
143
                "category": "LLMs",
144
                "description": "Wrapper around OpenAI large language models",
145
                "inputParams": [
146
                    {
147
                        "label": "Connect Credential",
148
                        "name": "credential",
149
                        "type": "credential",
150
                        "credentialNames": ["openAIApi"],
151
                        "id": "openAI_1-input-credential-credential"
152
                    },
153
                    {
154
                        "label": "Model Name",
155
                        "name": "modelName",
156
                        "type": "asyncOptions",
157
                        "loadMethod": "listModels",
158
                        "default": "gpt-3.5-turbo-instruct",
159
                        "id": "openAI_1-input-modelName-options"
160
                    },
161
                    {
162
                        "label": "Temperature",
163
                        "name": "temperature",
164
                        "type": "number",
165
                        "default": 0.7,
166
                        "optional": true,
167
                        "id": "openAI_1-input-temperature-number"
168
                    },
169
                    {
170
                        "label": "Max Tokens",
171
                        "name": "maxTokens",
172
                        "type": "number",
173
                        "optional": true,
174
                        "additionalParams": true,
175
                        "id": "openAI_1-input-maxTokens-number"
176
                    },
177
                    {
178
                        "label": "Top Probability",
179
                        "name": "topP",
180
                        "type": "number",
181
                        "optional": true,
182
                        "additionalParams": true,
183
                        "id": "openAI_1-input-topP-number"
184
                    },
185
                    {
186
                        "label": "Best Of",
187
                        "name": "bestOf",
188
                        "type": "number",
189
                        "optional": true,
190
                        "additionalParams": true,
191
                        "id": "openAI_1-input-bestOf-number"
192
                    },
193
                    {
194
                        "label": "Frequency Penalty",
195
                        "name": "frequencyPenalty",
196
                        "type": "number",
197
                        "optional": true,
198
                        "additionalParams": true,
199
                        "id": "openAI_1-input-frequencyPenalty-number"
200
                    },
201
                    {
202
                        "label": "Presence Penalty",
203
                        "name": "presencePenalty",
204
                        "type": "number",
205
                        "optional": true,
206
                        "additionalParams": true,
207
                        "id": "openAI_1-input-presencePenalty-number"
208
                    },
209
                    {
210
                        "label": "Batch Size",
211
                        "name": "batchSize",
212
                        "type": "number",
213
                        "optional": true,
214
                        "additionalParams": true,
215
                        "id": "openAI_1-input-batchSize-number"
216
                    },
217
                    {
218
                        "label": "Timeout",
219
                        "name": "timeout",
220
                        "type": "number",
221
                        "optional": true,
222
                        "additionalParams": true,
223
                        "id": "openAI_1-input-timeout-number"
224
                    },
225
                    {
226
                        "label": "BasePath",
227
                        "name": "basepath",
228
                        "type": "string",
229
                        "optional": true,
230
                        "additionalParams": true,
231
                        "id": "openAI_1-input-basepath-string"
232
                    }
233
                ],
234
                "inputAnchors": [
235
                    {
236
                        "label": "Cache",
237
                        "name": "cache",
238
                        "type": "BaseCache",
239
                        "optional": true,
240
                        "id": "openAI_1-input-cache-BaseCache"
241
                    }
242
                ],
243
                "inputs": {
244
                    "modelName": "gpt-3.5-turbo-instruct",
245
                    "temperature": 0.7,
246
                    "maxTokens": "",
247
                    "topP": "",
248
                    "bestOf": "",
249
                    "frequencyPenalty": "",
250
                    "presencePenalty": "",
251
                    "batchSize": "",
252
                    "timeout": "",
253
                    "basepath": ""
254
                },
255
                "outputAnchors": [
256
                    {
257
                        "id": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
258
                        "name": "openAI",
259
                        "label": "OpenAI",
260
                        "type": "OpenAI | BaseLLM | BaseLanguageModel"
261
                    }
262
                ],
263
                "outputs": {},
264
                "selected": false
265
            },
266
            "selected": false,
267
            "positionAbsolute": {
268
                "x": 791.6102007244282,
269
                "y": -83.71386876566092
270
            },
271
            "dragging": false
272
        },
273
        {
274
            "width": 300,
275
            "height": 574,
276
            "id": "openAI_2",
277
            "position": {
278
                "x": 2340.5995455075863,
279
                "y": -310.7609446553905
280
            },
281
            "type": "customNode",
282
            "data": {
283
                "id": "openAI_2",
284
                "label": "OpenAI",
285
                "version": 4.0,
286
                "name": "openAI",
287
                "type": "OpenAI",
288
                "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
289
                "category": "LLMs",
290
                "description": "Wrapper around OpenAI large language models",
291
                "inputParams": [
292
                    {
293
                        "label": "Connect Credential",
294
                        "name": "credential",
295
                        "type": "credential",
296
                        "credentialNames": ["openAIApi"],
297
                        "id": "openAI_2-input-credential-credential"
298
                    },
299
                    {
300
                        "label": "Model Name",
301
                        "name": "modelName",
302
                        "type": "asyncOptions",
303
                        "loadMethod": "listModels",
304
                        "default": "gpt-3.5-turbo-instruct",
305
                        "id": "openAI_2-input-modelName-options"
306
                    },
307
                    {
308
                        "label": "Temperature",
309
                        "name": "temperature",
310
                        "type": "number",
311
                        "default": 0.7,
312
                        "optional": true,
313
                        "id": "openAI_2-input-temperature-number"
314
                    },
315
                    {
316
                        "label": "Max Tokens",
317
                        "name": "maxTokens",
318
                        "type": "number",
319
                        "optional": true,
320
                        "additionalParams": true,
321
                        "id": "openAI_2-input-maxTokens-number"
322
                    },
323
                    {
324
                        "label": "Top Probability",
325
                        "name": "topP",
326
                        "type": "number",
327
                        "optional": true,
328
                        "additionalParams": true,
329
                        "id": "openAI_2-input-topP-number"
330
                    },
331
                    {
332
                        "label": "Best Of",
333
                        "name": "bestOf",
334
                        "type": "number",
335
                        "optional": true,
336
                        "additionalParams": true,
337
                        "id": "openAI_2-input-bestOf-number"
338
                    },
339
                    {
340
                        "label": "Frequency Penalty",
341
                        "name": "frequencyPenalty",
342
                        "type": "number",
343
                        "optional": true,
344
                        "additionalParams": true,
345
                        "id": "openAI_2-input-frequencyPenalty-number"
346
                    },
347
                    {
348
                        "label": "Presence Penalty",
349
                        "name": "presencePenalty",
350
                        "type": "number",
351
                        "optional": true,
352
                        "additionalParams": true,
353
                        "id": "openAI_2-input-presencePenalty-number"
354
                    },
355
                    {
356
                        "label": "Batch Size",
357
                        "name": "batchSize",
358
                        "type": "number",
359
                        "optional": true,
360
                        "additionalParams": true,
361
                        "id": "openAI_2-input-batchSize-number"
362
                    },
363
                    {
364
                        "label": "Timeout",
365
                        "name": "timeout",
366
                        "type": "number",
367
                        "optional": true,
368
                        "additionalParams": true,
369
                        "id": "openAI_2-input-timeout-number"
370
                    },
371
                    {
372
                        "label": "BasePath",
373
                        "name": "basepath",
374
                        "type": "string",
375
                        "optional": true,
376
                        "additionalParams": true,
377
                        "id": "openAI_2-input-basepath-string"
378
                    }
379
                ],
380
                "inputAnchors": [
381
                    {
382
                        "label": "Cache",
383
                        "name": "cache",
384
                        "type": "BaseCache",
385
                        "optional": true,
386
                        "id": "openAI_2-input-cache-BaseCache"
387
                    }
388
                ],
389
                "inputs": {
390
                    "modelName": "gpt-3.5-turbo-instruct",
391
                    "temperature": 0.7,
392
                    "maxTokens": "",
393
                    "topP": "",
394
                    "bestOf": "",
395
                    "frequencyPenalty": "",
396
                    "presencePenalty": "",
397
                    "batchSize": "",
398
                    "timeout": "",
399
                    "basepath": ""
400
                },
401
                "outputAnchors": [
402
                    {
403
                        "id": "openAI_2-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
404
                        "name": "openAI",
405
                        "label": "OpenAI",
406
                        "type": "OpenAI | BaseLLM | BaseLanguageModel"
407
                    }
408
                ],
409
                "outputs": {},
410
                "selected": false
411
            },
412
            "selected": false,
413
            "positionAbsolute": {
414
                "x": 2340.5995455075863,
415
                "y": -310.7609446553905
416
            },
417
            "dragging": false
418
        },
419
        {
420
            "width": 300,
421
            "height": 456,
422
            "id": "llmChain_0",
423
            "position": {
424
                "x": 1183.0899727188096,
425
                "y": 385.0159960992951
426
            },
427
            "type": "customNode",
428
            "data": {
429
                "id": "llmChain_0",
430
                "label": "LLM Chain",
431
                "version": 3,
432
                "name": "llmChain",
433
                "type": "LLMChain",
434
                "baseClasses": ["LLMChain", "BaseChain", "Runnable"],
435
                "category": "Chains",
436
                "description": "Chain to run queries against LLMs",
437
                "inputParams": [
438
                    {
439
                        "label": "Chain Name",
440
                        "name": "chainName",
441
                        "type": "string",
442
                        "placeholder": "Name Your Chain",
443
                        "optional": true,
444
                        "id": "llmChain_0-input-chainName-string"
445
                    }
446
                ],
447
                "inputAnchors": [
448
                    {
449
                        "label": "Language Model",
450
                        "name": "model",
451
                        "type": "BaseLanguageModel",
452
                        "id": "llmChain_0-input-model-BaseLanguageModel"
453
                    },
454
                    {
455
                        "label": "Prompt",
456
                        "name": "prompt",
457
                        "type": "BasePromptTemplate",
458
                        "id": "llmChain_0-input-prompt-BasePromptTemplate"
459
                    },
460
                    {
461
                        "label": "Output Parser",
462
                        "name": "outputParser",
463
                        "type": "BaseLLMOutputParser",
464
                        "optional": true,
465
                        "id": "llmChain_0-input-outputParser-BaseLLMOutputParser"
466
                    },
467
                    {
468
                        "label": "Input Moderation",
469
                        "description": "Detect text that could generate harmful output and prevent it from being sent to the language model",
470
                        "name": "inputModeration",
471
                        "type": "Moderation",
472
                        "optional": true,
473
                        "list": true,
474
                        "id": "llmChain_0-input-inputModeration-Moderation"
475
                    }
476
                ],
477
                "inputs": {
478
                    "model": "{{openAI_1.data.instance}}",
479
                    "prompt": "{{promptTemplate_0.data.instance}}",
480
                    "outputParser": "",
481
                    "chainName": "FirstChain",
482
                    "inputModeration": ""
483
                },
484
                "outputAnchors": [
485
                    {
486
                        "name": "output",
487
                        "label": "Output",
488
                        "type": "options",
489
                        "options": [
490
                            {
491
                                "id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable",
492
                                "name": "llmChain",
493
                                "label": "LLM Chain",
494
                                "type": "LLMChain | BaseChain | Runnable"
495
                            },
496
                            {
497
                                "id": "llmChain_0-output-outputPrediction-string|json",
498
                                "name": "outputPrediction",
499
                                "label": "Output Prediction",
500
                                "type": "string | json"
501
                            }
502
                        ],
503
                        "default": "llmChain"
504
                    }
505
                ],
506
                "outputs": {
507
                    "output": "outputPrediction"
508
                },
509
                "selected": false
510
            },
511
            "selected": false,
512
            "positionAbsolute": {
513
                "x": 1183.0899727188096,
514
                "y": 385.0159960992951
515
            },
516
            "dragging": false
517
        },
518
        {
519
            "width": 300,
520
            "height": 456,
521
            "id": "llmChain_1",
522
            "position": {
523
                "x": 2773.675809586143,
524
                "y": 114.39482869328754
525
            },
526
            "type": "customNode",
527
            "data": {
528
                "id": "llmChain_1",
529
                "label": "LLM Chain",
530
                "version": 3,
531
                "name": "llmChain",
532
                "type": "LLMChain",
533
                "baseClasses": ["LLMChain", "BaseChain", "Runnable"],
534
                "category": "Chains",
535
                "description": "Chain to run queries against LLMs",
536
                "inputParams": [
537
                    {
538
                        "label": "Chain Name",
539
                        "name": "chainName",
540
                        "type": "string",
541
                        "placeholder": "Name Your Chain",
542
                        "optional": true,
543
                        "id": "llmChain_1-input-chainName-string"
544
                    }
545
                ],
546
                "inputAnchors": [
547
                    {
548
                        "label": "Language Model",
549
                        "name": "model",
550
                        "type": "BaseLanguageModel",
551
                        "id": "llmChain_1-input-model-BaseLanguageModel"
552
                    },
553
                    {
554
                        "label": "Prompt",
555
                        "name": "prompt",
556
                        "type": "BasePromptTemplate",
557
                        "id": "llmChain_1-input-prompt-BasePromptTemplate"
558
                    },
559
                    {
560
                        "label": "Output Parser",
561
                        "name": "outputParser",
562
                        "type": "BaseLLMOutputParser",
563
                        "optional": true,
564
                        "id": "llmChain_1-input-outputParser-BaseLLMOutputParser"
565
                    },
566
                    {
567
                        "label": "Input Moderation",
568
                        "description": "Detect text that could generate harmful output and prevent it from being sent to the language model",
569
                        "name": "inputModeration",
570
                        "type": "Moderation",
571
                        "optional": true,
572
                        "list": true,
573
                        "id": "llmChain_1-input-inputModeration-Moderation"
574
                    }
575
                ],
576
                "inputs": {
577
                    "model": "{{openAI_2.data.instance}}",
578
                    "prompt": "{{promptTemplate_1.data.instance}}",
579
                    "outputParser": "",
580
                    "chainName": "LastChain",
581
                    "inputModeration": ""
582
                },
583
                "outputAnchors": [
584
                    {
585
                        "name": "output",
586
                        "label": "Output",
587
                        "type": "options",
588
                        "options": [
589
                            {
590
                                "id": "llmChain_1-output-llmChain-LLMChain|BaseChain|Runnable",
591
                                "name": "llmChain",
592
                                "label": "LLM Chain",
593
                                "type": "LLMChain | BaseChain | Runnable"
594
                            },
595
                            {
596
                                "id": "llmChain_1-output-outputPrediction-string|json",
597
                                "name": "outputPrediction",
598
                                "label": "Output Prediction",
599
                                "type": "string | json"
600
                            }
601
                        ],
602
                        "default": "llmChain"
603
                    }
604
                ],
605
                "outputs": {
606
                    "output": "llmChain"
607
                },
608
                "selected": false
609
            },
610
            "selected": false,
611
            "positionAbsolute": {
612
                "x": 2773.675809586143,
613
                "y": 114.39482869328754
614
            },
615
            "dragging": false
616
        },
617
        {
618
            "width": 300,
619
            "height": 511,
620
            "id": "promptTemplate_2",
621
            "position": {
622
                "x": 1992.5456174373144,
623
                "y": 675.5277193898106
624
            },
625
            "type": "customNode",
626
            "data": {
627
                "id": "promptTemplate_2",
628
                "label": "Prompt Template",
629
                "version": 1,
630
                "name": "promptTemplate",
631
                "type": "PromptTemplate",
632
                "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"],
633
                "category": "Prompts",
634
                "description": "Schema to represent a basic prompt for an LLM",
635
                "inputParams": [
636
                    {
637
                        "label": "Template",
638
                        "name": "template",
639
                        "type": "string",
640
                        "rows": 4,
641
                        "placeholder": "What is a good name for a company that makes {product}?",
642
                        "id": "promptTemplate_2-input-template-string"
643
                    },
644
                    {
645
                        "label": "Format Prompt Values",
646
                        "name": "promptValues",
647
                        "type": "json",
648
                        "optional": true,
649
                        "acceptVariable": true,
650
                        "list": true,
651
                        "id": "promptTemplate_2-input-promptValues-json"
652
                    }
653
                ],
654
                "inputAnchors": [],
655
                "inputs": {
656
                    "template": "Politely say \"I'm not able to answer the query\"",
657
                    "promptValues": "{\"objective\":\"{{question}}\",\"result\":\"\"}"
658
                },
659
                "outputAnchors": [
660
                    {
661
                        "id": "promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
662
                        "name": "promptTemplate",
663
                        "label": "PromptTemplate",
664
                        "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate"
665
                    }
666
                ],
667
                "outputs": {},
668
                "selected": false
669
            },
670
            "positionAbsolute": {
671
                "x": 1992.5456174373144,
672
                "y": 675.5277193898106
673
            },
674
            "selected": false,
675
            "dragging": false
676
        },
677
        {
678
            "width": 300,
679
            "height": 507,
680
            "id": "llmChain_2",
681
            "position": {
682
                "x": 2830.477603228176,
683
                "y": 907.9116984679802
684
            },
685
            "type": "customNode",
686
            "data": {
687
                "id": "llmChain_2",
688
                "label": "LLM Chain",
689
                "version": 3,
690
                "name": "llmChain",
691
                "type": "LLMChain",
692
                "baseClasses": ["LLMChain", "BaseChain", "Runnable"],
693
                "category": "Chains",
694
                "description": "Chain to run queries against LLMs",
695
                "inputParams": [
696
                    {
697
                        "label": "Chain Name",
698
                        "name": "chainName",
699
                        "type": "string",
700
                        "placeholder": "Name Your Chain",
701
                        "optional": true,
702
                        "id": "llmChain_2-input-chainName-string"
703
                    }
704
                ],
705
                "inputAnchors": [
706
                    {
707
                        "label": "Language Model",
708
                        "name": "model",
709
                        "type": "BaseLanguageModel",
710
                        "id": "llmChain_2-input-model-BaseLanguageModel"
711
                    },
712
                    {
713
                        "label": "Prompt",
714
                        "name": "prompt",
715
                        "type": "BasePromptTemplate",
716
                        "id": "llmChain_2-input-prompt-BasePromptTemplate"
717
                    },
718
                    {
719
                        "label": "Output Parser",
720
                        "name": "outputParser",
721
                        "type": "BaseLLMOutputParser",
722
                        "optional": true,
723
                        "id": "llmChain_2-input-outputParser-BaseLLMOutputParser"
724
                    },
725
                    {
726
                        "label": "Input Moderation",
727
                        "description": "Detect text that could generate harmful output and prevent it from being sent to the language model",
728
                        "name": "inputModeration",
729
                        "type": "Moderation",
730
                        "optional": true,
731
                        "list": true,
732
                        "id": "llmChain_2-input-inputModeration-Moderation"
733
                    }
734
                ],
735
                "inputs": {
736
                    "model": "{{chatOpenAI_0.data.instance}}",
737
                    "prompt": "{{promptTemplate_2.data.instance}}",
738
                    "outputParser": "",
739
                    "chainName": "FallbackChain",
740
                    "inputModeration": ""
741
                },
742
                "outputAnchors": [
743
                    {
744
                        "name": "output",
745
                        "label": "Output",
746
                        "type": "options",
747
                        "options": [
748
                            {
749
                                "id": "llmChain_2-output-llmChain-LLMChain|BaseChain|Runnable",
750
                                "name": "llmChain",
751
                                "label": "LLM Chain",
752
                                "type": "LLMChain | BaseChain | Runnable"
753
                            },
754
                            {
755
                                "id": "llmChain_2-output-outputPrediction-string|json",
756
                                "name": "outputPrediction",
757
                                "label": "Output Prediction",
758
                                "type": "string | json"
759
                            }
760
                        ],
761
                        "default": "llmChain"
762
                    }
763
                ],
764
                "outputs": {
765
                    "output": "llmChain"
766
                },
767
                "selected": false
768
            },
769
            "selected": false,
770
            "positionAbsolute": {
771
                "x": 2830.477603228176,
772
                "y": 907.9116984679802
773
            },
774
            "dragging": false
775
        },
776
        {
777
            "width": 300,
778
            "height": 755,
779
            "id": "ifElseFunction_0",
780
            "position": {
781
                "x": 1590.6560099561739,
782
                "y": 265.36655719326177
783
            },
784
            "type": "customNode",
785
            "data": {
786
                "id": "ifElseFunction_0",
787
                "label": "IfElse Function",
788
                "version": 1,
789
                "name": "ifElseFunction",
790
                "type": "IfElseFunction",
791
                "baseClasses": ["IfElseFunction", "Utilities"],
792
                "category": "Utilities",
793
                "description": "Split flows based on If Else javascript functions",
794
                "inputParams": [
795
                    {
796
                        "label": "Input Variables",
797
                        "name": "functionInputVariables",
798
                        "description": "Input variables can be used in the function with prefix $. For example: $var",
799
                        "type": "json",
800
                        "optional": true,
801
                        "acceptVariable": true,
802
                        "list": true,
803
                        "id": "ifElseFunction_0-input-functionInputVariables-json"
804
                    },
805
                    {
806
                        "label": "IfElse Name",
807
                        "name": "functionName",
808
                        "type": "string",
809
                        "optional": true,
810
                        "placeholder": "If Condition Match",
811
                        "id": "ifElseFunction_0-input-functionName-string"
812
                    },
813
                    {
814
                        "label": "If Function",
815
                        "name": "ifFunction",
816
                        "description": "Function must return a value",
817
                        "type": "code",
818
                        "rows": 2,
819
                        "default": "if (\"hello\" == \"hello\") {\n    return true;\n}",
820
                        "id": "ifElseFunction_0-input-ifFunction-code"
821
                    },
822
                    {
823
                        "label": "Else Function",
824
                        "name": "elseFunction",
825
                        "description": "Function must return a value",
826
                        "type": "code",
827
                        "rows": 2,
828
                        "default": "return false;",
829
                        "id": "ifElseFunction_0-input-elseFunction-code"
830
                    }
831
                ],
832
                "inputAnchors": [],
833
                "inputs": {
834
                    "functionInputVariables": "{\"task\":\"{{llmChain_0.data.instance}}\"}",
835
                    "functionName": "If Condition Match",
836
                    "ifFunction": "if (\"hello\" == \"21\") {\n    return $task;\n}",
837
                    "elseFunction": "return false;"
838
                },
839
                "outputAnchors": [
840
                    {
841
                        "name": "output",
842
                        "label": "Output",
843
                        "type": "options",
844
                        "options": [
845
                            {
846
                                "id": "ifElseFunction_0-output-returnTrue-string|number|boolean|json|array",
847
                                "name": "returnTrue",
848
                                "label": "True",
849
                                "type": "string | number | boolean | json | array"
850
                            },
851
                            {
852
                                "id": "ifElseFunction_0-output-returnFalse-string|number|boolean|json|array",
853
                                "name": "returnFalse",
854
                                "label": "False",
855
                                "type": "string | number | boolean | json | array"
856
                            }
857
                        ],
858
                        "default": "returnTrue"
859
                    }
860
                ],
861
                "outputs": {
862
                    "output": "returnTrue"
863
                },
864
                "selected": false
865
            },
866
            "selected": false,
867
            "positionAbsolute": {
868
                "x": 1590.6560099561739,
869
                "y": 265.36655719326177
870
            },
871
            "dragging": false
872
        },
873
        {
874
            "width": 300,
875
            "height": 574,
876
            "id": "chatOpenAI_0",
877
            "position": {
878
                "x": 2373.5711587130127,
879
                "y": 487.8533802540226
880
            },
881
            "type": "customNode",
882
            "data": {
883
                "id": "chatOpenAI_0",
884
                "label": "ChatOpenAI",
885
                "version": 6.0,
886
                "name": "chatOpenAI",
887
                "type": "ChatOpenAI",
888
                "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"],
889
                "category": "Chat Models",
890
                "description": "Wrapper around OpenAI large language models that use the Chat endpoint",
891
                "inputParams": [
892
                    {
893
                        "label": "Connect Credential",
894
                        "name": "credential",
895
                        "type": "credential",
896
                        "credentialNames": ["openAIApi"],
897
                        "id": "chatOpenAI_0-input-credential-credential"
898
                    },
899
                    {
900
                        "label": "Model Name",
901
                        "name": "modelName",
902
                        "type": "asyncOptions",
903
                        "loadMethod": "listModels",
904
                        "default": "gpt-3.5-turbo",
905
                        "id": "chatOpenAI_0-input-modelName-options"
906
                    },
907
                    {
908
                        "label": "Temperature",
909
                        "name": "temperature",
910
                        "type": "number",
911
                        "step": 0.1,
912
                        "default": 0.9,
913
                        "optional": true,
914
                        "id": "chatOpenAI_0-input-temperature-number"
915
                    },
916
                    {
917
                        "label": "Max Tokens",
918
                        "name": "maxTokens",
919
                        "type": "number",
920
                        "step": 1,
921
                        "optional": true,
922
                        "additionalParams": true,
923
                        "id": "chatOpenAI_0-input-maxTokens-number"
924
                    },
925
                    {
926
                        "label": "Top Probability",
927
                        "name": "topP",
928
                        "type": "number",
929
                        "step": 0.1,
930
                        "optional": true,
931
                        "additionalParams": true,
932
                        "id": "chatOpenAI_0-input-topP-number"
933
                    },
934
                    {
935
                        "label": "Frequency Penalty",
936
                        "name": "frequencyPenalty",
937
                        "type": "number",
938
                        "step": 0.1,
939
                        "optional": true,
940
                        "additionalParams": true,
941
                        "id": "chatOpenAI_0-input-frequencyPenalty-number"
942
                    },
943
                    {
944
                        "label": "Presence Penalty",
945
                        "name": "presencePenalty",
946
                        "type": "number",
947
                        "step": 0.1,
948
                        "optional": true,
949
                        "additionalParams": true,
950
                        "id": "chatOpenAI_0-input-presencePenalty-number"
951
                    },
952
                    {
953
                        "label": "Timeout",
954
                        "name": "timeout",
955
                        "type": "number",
956
                        "step": 1,
957
                        "optional": true,
958
                        "additionalParams": true,
959
                        "id": "chatOpenAI_0-input-timeout-number"
960
                    },
961
                    {
962
                        "label": "BasePath",
963
                        "name": "basepath",
964
                        "type": "string",
965
                        "optional": true,
966
                        "additionalParams": true,
967
                        "id": "chatOpenAI_0-input-basepath-string"
968
                    },
969
                    {
970
                        "label": "BaseOptions",
971
                        "name": "baseOptions",
972
                        "type": "json",
973
                        "optional": true,
974
                        "additionalParams": true,
975
                        "id": "chatOpenAI_0-input-baseOptions-json"
976
                    },
977
                    {
978
                        "label": "Allow Image Uploads",
979
                        "name": "allowImageUploads",
980
                        "type": "boolean",
981
                        "description": "Automatically uses gpt-4-vision-preview when image is being uploaded from chat. Only works with LLMChain, Conversation Chain, ReAct Agent, and Conversational Agent",
982
                        "default": false,
983
                        "optional": true,
984
                        "id": "chatOpenAI_0-input-allowImageUploads-boolean"
985
                    },
986
                    {
987
                        "label": "Image Resolution",
988
                        "description": "This parameter controls the resolution in which the model views the image.",
989
                        "name": "imageResolution",
990
                        "type": "options",
991
                        "options": [
992
                            {
993
                                "label": "Low",
994
                                "name": "low"
995
                            },
996
                            {
997
                                "label": "High",
998
                                "name": "high"
999
                            },
1000
                            {
1001
                                "label": "Auto",
1002
                                "name": "auto"
1003
                            }
1004
                        ],
1005
                        "default": "low",
1006
                        "optional": false,
1007
                        "additionalParams": true,
1008
                        "id": "chatOpenAI_0-input-imageResolution-options"
1009
                    }
1010
                ],
1011
                "inputAnchors": [
1012
                    {
1013
                        "label": "Cache",
1014
                        "name": "cache",
1015
                        "type": "BaseCache",
1016
                        "optional": true,
1017
                        "id": "chatOpenAI_0-input-cache-BaseCache"
1018
                    }
1019
                ],
1020
                "inputs": {
1021
                    "cache": "",
1022
                    "modelName": "gpt-3.5-turbo",
1023
                    "temperature": 0.9,
1024
                    "maxTokens": "",
1025
                    "topP": "",
1026
                    "frequencyPenalty": "",
1027
                    "presencePenalty": "",
1028
                    "timeout": "",
1029
                    "basepath": "",
1030
                    "baseOptions": "",
1031
                    "allowImageUploads": true,
1032
                    "imageResolution": "low"
1033
                },
1034
                "outputAnchors": [
1035
                    {
1036
                        "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable",
1037
                        "name": "chatOpenAI",
1038
                        "label": "ChatOpenAI",
1039
                        "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable"
1040
                    }
1041
                ],
1042
                "outputs": {},
1043
                "selected": false
1044
            },
1045
            "selected": false,
1046
            "positionAbsolute": {
1047
                "x": 2373.5711587130127,
1048
                "y": 487.8533802540226
1049
            },
1050
            "dragging": false
1051
        }
1052
    ],
1053
    "edges": [
1054
        {
1055
            "source": "openAI_1",
1056
            "sourceHandle": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
1057
            "target": "llmChain_0",
1058
            "targetHandle": "llmChain_0-input-model-BaseLanguageModel",
1059
            "type": "buttonedge",
1060
            "id": "openAI_1-openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-llmChain_0-llmChain_0-input-model-BaseLanguageModel",
1061
            "data": {
1062
                "label": ""
1063
            }
1064
        },
1065
        {
1066
            "source": "promptTemplate_0",
1067
            "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
1068
            "target": "llmChain_0",
1069
            "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate",
1070
            "type": "buttonedge",
1071
            "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate",
1072
            "data": {
1073
                "label": ""
1074
            }
1075
        },
1076
        {
1077
            "source": "promptTemplate_1",
1078
            "sourceHandle": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
1079
            "target": "llmChain_1",
1080
            "targetHandle": "llmChain_1-input-prompt-BasePromptTemplate",
1081
            "type": "buttonedge",
1082
            "id": "promptTemplate_1-promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate",
1083
            "data": {
1084
                "label": ""
1085
            }
1086
        },
1087
        {
1088
            "source": "openAI_2",
1089
            "sourceHandle": "openAI_2-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
1090
            "target": "llmChain_1",
1091
            "targetHandle": "llmChain_1-input-model-BaseLanguageModel",
1092
            "type": "buttonedge",
1093
            "id": "openAI_2-openAI_2-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-llmChain_1-llmChain_1-input-model-BaseLanguageModel",
1094
            "data": {
1095
                "label": ""
1096
            }
1097
        },
1098
        {
1099
            "source": "promptTemplate_2",
1100
            "sourceHandle": "promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate",
1101
            "target": "llmChain_2",
1102
            "targetHandle": "llmChain_2-input-prompt-BasePromptTemplate",
1103
            "type": "buttonedge",
1104
            "id": "promptTemplate_2-promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_2-llmChain_2-input-prompt-BasePromptTemplate"
1105
        },
1106
        {
1107
            "source": "llmChain_0",
1108
            "sourceHandle": "llmChain_0-output-outputPrediction-string|json",
1109
            "target": "ifElseFunction_0",
1110
            "targetHandle": "ifElseFunction_0-input-functionInputVariables-json",
1111
            "type": "buttonedge",
1112
            "id": "llmChain_0-llmChain_0-output-outputPrediction-string|json-ifElseFunction_0-ifElseFunction_0-input-functionInputVariables-json"
1113
        },
1114
        {
1115
            "source": "ifElseFunction_0",
1116
            "sourceHandle": "ifElseFunction_0-output-returnFalse-string|number|boolean|json|array",
1117
            "target": "promptTemplate_2",
1118
            "targetHandle": "promptTemplate_2-input-promptValues-json",
1119
            "type": "buttonedge",
1120
            "id": "ifElseFunction_0-ifElseFunction_0-output-returnFalse-string|number|boolean|json|array-promptTemplate_2-promptTemplate_2-input-promptValues-json"
1121
        },
1122
        {
1123
            "source": "ifElseFunction_0",
1124
            "sourceHandle": "ifElseFunction_0-output-returnTrue-string|number|boolean|json|array",
1125
            "target": "promptTemplate_1",
1126
            "targetHandle": "promptTemplate_1-input-promptValues-json",
1127
            "type": "buttonedge",
1128
            "id": "ifElseFunction_0-ifElseFunction_0-output-returnTrue-string|number|boolean|json|array-promptTemplate_1-promptTemplate_1-input-promptValues-json"
1129
        },
1130
        {
1131
            "source": "chatOpenAI_0",
1132
            "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable",
1133
            "target": "llmChain_2",
1134
            "targetHandle": "llmChain_2-input-model-BaseLanguageModel",
1135
            "type": "buttonedge",
1136
            "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_2-llmChain_2-input-model-BaseLanguageModel"
1137
        }
1138
    ]
1139
}
1140

Использование cookies

Мы используем файлы cookie в соответствии с Политикой конфиденциальности и Политикой использования cookies.

Нажимая кнопку «Принимаю», Вы даете АО «СберТех» согласие на обработку Ваших персональных данных в целях совершенствования нашего веб-сайта и Сервиса GitVerse, а также повышения удобства их использования.

Запретить использование cookies Вы можете самостоятельно в настройках Вашего браузера.