{
2"description": "Breaks down query into sub questions for each relevant data source, then combine into final response",
3"categories": "Sub Question Query Engine,Sticky Note,QueryEngine Tool,Compact and Refine,ChatOpenAI,Pinecone,LlamaIndex",
4"framework": "LlamaIndex",
5"badge": "NEW",
6"nodes": [
7{
8"width": 300,
9"height": 749,
10"id": "compactrefineLlamaIndex_0",
11"position": {
12"x": -443.9012456561584,
13"y": 826.6100190232154
14},
15"type": "customNode",
16"data": {
17"id": "compactrefineLlamaIndex_0",
18"label": "Compact and Refine",
19"version": 1,
20"name": "compactrefineLlamaIndex",
21"type": "CompactRefine",
22"baseClasses": ["CompactRefine", "ResponseSynthesizer"],
23"tags": ["LlamaIndex"],
24"category": "Response Synthesizer",
25"description": "CompactRefine is a slight variation of Refine that first compacts the text chunks into the smallest possible number of chunks.",
26"inputParams": [
27{
28"label": "Refine Prompt",
29"name": "refinePrompt",
30"type": "string",
31"rows": 4,
32"default": "The original query is as follows: {query}\nWe have provided an existing answer: {existingAnswer}\nWe have the opportunity to refine the existing answer (only if needed) with some more context below.\n------------\n{context}\n------------\nGiven the new context, refine the original answer to better answer the query. If the context isn't useful, return the original answer.\nRefined Answer:",
"warning": "Prompt can contain no variables, or up to 3 variables. Variables must be {existingAnswer}, {context} and {query}",
34"optional": true,
35"id": "compactrefineLlamaIndex_0-input-refinePrompt-string"
36},
37{
38"label": "Text QA Prompt",
39"name": "textQAPrompt",
40"type": "string",
41"rows": 4,
42"default": "Context information is below.\n---------------------\n{context}\n---------------------\nGiven the context information and not prior knowledge, answer the query.\nQuery: {query}\nAnswer:",
"warning": "Prompt can contain no variables, or up to 2 variables. Variables must be {context} and {query}",
44"optional": true,
45"id": "compactrefineLlamaIndex_0-input-textQAPrompt-string"
46}
47],
48"inputAnchors": [],
49"inputs": {
"refinePrompt": "A user has selected a set of SEC filing documents and has asked a question about them.\nThe SEC documents have the following titles:\n- Apple Inc (AAPL) FORM 10K 2022\n- Tesla Inc (TSLA) FORM 10K 2022\nThe original query is as follows: {query}\nWe have provided an existing answer: {existingAnswer}\nWe have the opportunity to refine the existing answer (only if needed) with some more context below.\n------------\n{context}\n------------\nGiven the new context, refine the original answer to better answer the query. If the context isn't useful, return the original answer.\nRefined Answer:",
"textQAPrompt": "A user has selected a set of SEC filing documents and has asked a question about them.\nThe SEC documents have the following titles:\n- Apple Inc (AAPL) FORM 10K 2022\n- Tesla Inc (TSLA) FORM 10K 2022\nContext information is below.\n---------------------\n{context}\n---------------------\nGiven the context information and not prior knowledge, answer the query.\nQuery: {query}\nAnswer:"
52},
53"outputAnchors": [
54{
55"id": "compactrefineLlamaIndex_0-output-compactrefineLlamaIndex-CompactRefine|ResponseSynthesizer",
56"name": "compactrefineLlamaIndex",
57"label": "CompactRefine",
58"type": "CompactRefine | ResponseSynthesizer"
59}
60],
61"outputs": {},
62"selected": false
63},
64"selected": false,
65"positionAbsolute": {
66"x": -443.9012456561584,
67"y": 826.6100190232154
68},
69"dragging": false
70},
71{
72"width": 300,
73"height": 611,
74"id": "pineconeLlamaIndex_0",
75"position": {
76"x": 35.45798119088212,
77"y": -132.1789597307308
78},
79"type": "customNode",
80"data": {
81"id": "pineconeLlamaIndex_0",
82"label": "Pinecone",
83"version": 1,
84"name": "pineconeLlamaIndex",
85"type": "Pinecone",
86"baseClasses": ["Pinecone", "VectorIndexRetriever"],
87"tags": ["LlamaIndex"],
88"category": "Vector Stores",
89"description": "Upsert embedded data and perform similarity search upon query using Pinecone, a leading fully managed hosted vector database",
90"inputParams": [
91{
92"label": "Connect Credential",
93"name": "credential",
94"type": "credential",
95"credentialNames": ["pineconeApi"],
96"id": "pineconeLlamaIndex_0-input-credential-credential"
97},
98{
99"label": "Pinecone Index",
100"name": "pineconeIndex",
101"type": "string",
102"id": "pineconeLlamaIndex_0-input-pineconeIndex-string"
103},
104{
105"label": "Pinecone Namespace",
106"name": "pineconeNamespace",
107"type": "string",
108"placeholder": "my-first-namespace",
109"additionalParams": true,
110"optional": true,
111"id": "pineconeLlamaIndex_0-input-pineconeNamespace-string"
112},
113{
114"label": "Pinecone Metadata Filter",
115"name": "pineconeMetadataFilter",
116"type": "json",
117"optional": true,
118"additionalParams": true,
119"id": "pineconeLlamaIndex_0-input-pineconeMetadataFilter-json"
120},
121{
122"label": "Top K",
123"name": "topK",
124"description": "Number of top results to fetch. Default to 4",
125"placeholder": "4",
126"type": "number",
127"additionalParams": true,
128"optional": true,
129"id": "pineconeLlamaIndex_0-input-topK-number"
130}
131],
132"inputAnchors": [
133{
134"label": "Document",
135"name": "document",
136"type": "Document",
137"list": true,
138"optional": true,
139"id": "pineconeLlamaIndex_0-input-document-Document"
140},
141{
142"label": "Chat Model",
143"name": "model",
144"type": "BaseChatModel_LlamaIndex",
145"id": "pineconeLlamaIndex_0-input-model-BaseChatModel_LlamaIndex"
146},
147{
148"label": "Embeddings",
149"name": "embeddings",
150"type": "BaseEmbedding_LlamaIndex",
151"id": "pineconeLlamaIndex_0-input-embeddings-BaseEmbedding_LlamaIndex"
152}
153],
154"inputs": {
155"document": [],
156"model": "{{chatOpenAI_LlamaIndex_0.data.instance}}",
157"embeddings": "{{openAIEmbedding_LlamaIndex_0.data.instance}}",
158"pineconeIndex": "flowiseindex",
159"pineconeNamespace": "pinecone-form10k",
160"pineconeMetadataFilter": "{\"source\":\"tesla\"}",
161"topK": ""
162},
163"outputAnchors": [
164{
165"name": "output",
166"label": "Output",
167"type": "options",
168"options": [
169{
170"id": "pineconeLlamaIndex_0-output-retriever-Pinecone|VectorIndexRetriever",
171"name": "retriever",
172"label": "Pinecone Retriever",
173"type": "Pinecone | VectorIndexRetriever"
174},
175{
176"id": "pineconeLlamaIndex_0-output-vectorStore-Pinecone|VectorStoreIndex",
177"name": "vectorStore",
178"label": "Pinecone Vector Store Index",
179"type": "Pinecone | VectorStoreIndex"
180}
181],
182"default": "retriever"
183}
184],
185"outputs": {
186"output": "retriever"
187},
188"selected": false
189},
190"selected": false,
191"positionAbsolute": {
192"x": 35.45798119088212,
193"y": -132.1789597307308
194},
195"dragging": false
196},
197{
198"width": 300,
199"height": 529,
200"id": "chatOpenAI_LlamaIndex_0",
201"position": {
202"x": -455.232655468177,
203"y": -711.0080711676725
204},
205"type": "customNode",
206"data": {
207"id": "chatOpenAI_LlamaIndex_0",
208"label": "ChatOpenAI",
209"version": 2.0,
210"name": "chatOpenAI_LlamaIndex",
211"type": "ChatOpenAI",
212"baseClasses": ["ChatOpenAI", "BaseChatModel_LlamaIndex", "BaseLLM"],
213"tags": ["LlamaIndex"],
214"category": "Chat Models",
215"description": "Wrapper around OpenAI Chat LLM specific for LlamaIndex",
216"inputParams": [
217{
218"label": "Connect Credential",
219"name": "credential",
220"type": "credential",
221"credentialNames": ["openAIApi"],
222"id": "chatOpenAI_LlamaIndex_0-input-credential-credential"
223},
224{
225"label": "Model Name",
226"name": "modelName",
227"type": "asyncOptions",
228"loadMethod": "listModels",
229"default": "gpt-3.5-turbo",
230"id": "chatOpenAI_LlamaIndex_0-input-modelName-options"
231},
232{
233"label": "Temperature",
234"name": "temperature",
235"type": "number",
236"step": 0.1,
237"default": 0.9,
238"optional": true,
239"id": "chatOpenAI_LlamaIndex_0-input-temperature-number"
240},
241{
242"label": "Max Tokens",
243"name": "maxTokens",
244"type": "number",
245"step": 1,
246"optional": true,
247"additionalParams": true,
248"id": "chatOpenAI_LlamaIndex_0-input-maxTokens-number"
249},
250{
251"label": "Top Probability",
252"name": "topP",
253"type": "number",
254"step": 0.1,
255"optional": true,
256"additionalParams": true,
257"id": "chatOpenAI_LlamaIndex_0-input-topP-number"
258},
259{
260"label": "Timeout",
261"name": "timeout",
262"type": "number",
263"step": 1,
264"optional": true,
265"additionalParams": true,
266"id": "chatOpenAI_LlamaIndex_0-input-timeout-number"
267}
268],
269"inputAnchors": [],
270"inputs": {
271"modelName": "gpt-3.5-turbo-16k",
272"temperature": "0",
273"maxTokens": "",
274"topP": "",
275"timeout": ""
276},
277"outputAnchors": [
278{
279"id": "chatOpenAI_LlamaIndex_0-output-chatOpenAI_LlamaIndex-ChatOpenAI|BaseChatModel_LlamaIndex|BaseLLM",
280"name": "chatOpenAI_LlamaIndex",
281"label": "ChatOpenAI",
282"type": "ChatOpenAI | BaseChatModel_LlamaIndex | BaseLLM"
283}
284],
285"outputs": {},
286"selected": false
287},
288"selected": false,
289"positionAbsolute": {
290"x": -455.232655468177,
291"y": -711.0080711676725
292},
293"dragging": false
294},
295{
296"width": 300,
297"height": 334,
298"id": "openAIEmbedding_LlamaIndex_0",
299"position": {
300"x": -451.0082548287243,
301"y": -127.15143353229783
302},
303"type": "customNode",
304"data": {
305"id": "openAIEmbedding_LlamaIndex_0",
306"label": "OpenAI Embedding",
307"version": 2,
308"name": "openAIEmbedding_LlamaIndex",
309"type": "OpenAIEmbedding",
310"baseClasses": ["OpenAIEmbedding", "BaseEmbedding_LlamaIndex", "BaseEmbedding"],
311"tags": ["LlamaIndex"],
312"category": "Embeddings",
313"description": "OpenAI Embedding specific for LlamaIndex",
314"inputParams": [
315{
316"label": "Connect Credential",
317"name": "credential",
318"type": "credential",
319"credentialNames": ["openAIApi"],
320"id": "openAIEmbedding_LlamaIndex_0-input-credential-credential"
321},
322{
323"label": "Model Name",
324"name": "modelName",
325"type": "asyncOptions",
326"loadMethod": "listModels",
327"default": "text-embedding-ada-002",
328"id": "openAIEmbedding_LlamaIndex_0-input-modelName-options"
329},
330{
331"label": "Timeout",
332"name": "timeout",
333"type": "number",
334"optional": true,
335"additionalParams": true,
336"id": "openAIEmbedding_LlamaIndex_0-input-timeout-number"
337},
338{
339"label": "BasePath",
340"name": "basepath",
341"type": "string",
342"optional": true,
343"additionalParams": true,
344"id": "openAIEmbedding_LlamaIndex_0-input-basepath-string"
345}
346],
347"inputAnchors": [],
348"inputs": {
349"timeout": "",
350"basepath": "",
351"modelName": "text-embedding-ada-002"
352},
353"outputAnchors": [
354{
355"id": "openAIEmbedding_LlamaIndex_0-output-openAIEmbedding_LlamaIndex-OpenAIEmbedding|BaseEmbedding_LlamaIndex|BaseEmbedding",
356"name": "openAIEmbedding_LlamaIndex",
357"label": "OpenAIEmbedding",
358"type": "OpenAIEmbedding | BaseEmbedding_LlamaIndex | BaseEmbedding"
359}
360],
361"outputs": {},
362"selected": false
363},
364"selected": false,
365"dragging": false,
366"positionAbsolute": {
367"x": -451.0082548287243,
368"y": -127.15143353229783
369}
370},
371{
372"width": 300,
373"height": 611,
374"id": "pineconeLlamaIndex_1",
375"position": {
376"x": 43.95604951980056,
377"y": -783.0024679245387
378},
379"type": "customNode",
380"data": {
381"id": "pineconeLlamaIndex_1",
382"label": "Pinecone",
383"version": 1,
384"name": "pineconeLlamaIndex",
385"type": "Pinecone",
386"baseClasses": ["Pinecone", "VectorIndexRetriever"],
387"tags": ["LlamaIndex"],
388"category": "Vector Stores",
389"description": "Upsert embedded data and perform similarity search upon query using Pinecone, a leading fully managed hosted vector database",
390"inputParams": [
391{
392"label": "Connect Credential",
393"name": "credential",
394"type": "credential",
395"credentialNames": ["pineconeApi"],
396"id": "pineconeLlamaIndex_1-input-credential-credential"
397},
398{
399"label": "Pinecone Index",
400"name": "pineconeIndex",
401"type": "string",
402"id": "pineconeLlamaIndex_1-input-pineconeIndex-string"
403},
404{
405"label": "Pinecone Namespace",
406"name": "pineconeNamespace",
407"type": "string",
408"placeholder": "my-first-namespace",
409"additionalParams": true,
410"optional": true,
411"id": "pineconeLlamaIndex_1-input-pineconeNamespace-string"
412},
413{
414"label": "Pinecone Metadata Filter",
415"name": "pineconeMetadataFilter",
416"type": "json",
417"optional": true,
418"additionalParams": true,
419"id": "pineconeLlamaIndex_1-input-pineconeMetadataFilter-json"
420},
421{
422"label": "Top K",
423"name": "topK",
424"description": "Number of top results to fetch. Default to 4",
425"placeholder": "4",
426"type": "number",
427"additionalParams": true,
428"optional": true,
429"id": "pineconeLlamaIndex_1-input-topK-number"
430}
431],
432"inputAnchors": [
433{
434"label": "Document",
435"name": "document",
436"type": "Document",
437"list": true,
438"optional": true,
439"id": "pineconeLlamaIndex_1-input-document-Document"
440},
441{
442"label": "Chat Model",
443"name": "model",
444"type": "BaseChatModel_LlamaIndex",
445"id": "pineconeLlamaIndex_1-input-model-BaseChatModel_LlamaIndex"
446},
447{
448"label": "Embeddings",
449"name": "embeddings",
450"type": "BaseEmbedding_LlamaIndex",
451"id": "pineconeLlamaIndex_1-input-embeddings-BaseEmbedding_LlamaIndex"
452}
453],
454"inputs": {
455"document": [],
456"model": "{{chatOpenAI_LlamaIndex_0.data.instance}}",
457"embeddings": "{{openAIEmbedding_LlamaIndex_0.data.instance}}",
458"pineconeIndex": "flowiseindex",
459"pineconeNamespace": "pinecone-form10k",
460"pineconeMetadataFilter": "{\"source\":\"apple\"}",
461"topK": ""
462},
463"outputAnchors": [
464{
465"name": "output",
466"label": "Output",
467"type": "options",
468"options": [
469{
470"id": "pineconeLlamaIndex_1-output-retriever-Pinecone|VectorIndexRetriever",
471"name": "retriever",
472"label": "Pinecone Retriever",
473"type": "Pinecone | VectorIndexRetriever"
474},
475{
476"id": "pineconeLlamaIndex_1-output-vectorStore-Pinecone|VectorStoreIndex",
477"name": "vectorStore",
478"label": "Pinecone Vector Store Index",
479"type": "Pinecone | VectorStoreIndex"
480}
481],
482"default": "retriever"
483}
484],
485"outputs": {
486"output": "retriever"
487},
488"selected": false
489},
490"selected": false,
491"positionAbsolute": {
492"x": 43.95604951980056,
493"y": -783.0024679245387
494},
495"dragging": false
496},
497{
498"width": 300,
499"height": 529,
500"id": "chatOpenAI_LlamaIndex_1",
501"position": {
502"x": -446.80851289432655,
503"y": 246.8790997755625
504},
505"type": "customNode",
506"data": {
507"id": "chatOpenAI_LlamaIndex_1",
508"label": "ChatOpenAI",
509"version": 2.0,
510"name": "chatOpenAI_LlamaIndex",
511"type": "ChatOpenAI",
512"baseClasses": ["ChatOpenAI", "BaseChatModel_LlamaIndex", "BaseLLM"],
513"tags": ["LlamaIndex"],
514"category": "Chat Models",
515"description": "Wrapper around OpenAI Chat LLM specific for LlamaIndex",
516"inputParams": [
517{
518"label": "Connect Credential",
519"name": "credential",
520"type": "credential",
521"credentialNames": ["openAIApi"],
522"id": "chatOpenAI_LlamaIndex_1-input-credential-credential"
523},
524{
525"label": "Model Name",
526"name": "modelName",
527"type": "asyncOptions",
528"loadMethod": "listModels",
529"default": "gpt-3.5-turbo",
530"id": "chatOpenAI_LlamaIndex_1-input-modelName-options"
531},
532{
533"label": "Temperature",
534"name": "temperature",
535"type": "number",
536"step": 0.1,
537"default": 0.9,
538"optional": true,
539"id": "chatOpenAI_LlamaIndex_1-input-temperature-number"
540},
541{
542"label": "Max Tokens",
543"name": "maxTokens",
544"type": "number",
545"step": 1,
546"optional": true,
547"additionalParams": true,
548"id": "chatOpenAI_LlamaIndex_1-input-maxTokens-number"
549},
550{
551"label": "Top Probability",
552"name": "topP",
553"type": "number",
554"step": 0.1,
555"optional": true,
556"additionalParams": true,
557"id": "chatOpenAI_LlamaIndex_1-input-topP-number"
558},
559{
560"label": "Timeout",
561"name": "timeout",
562"type": "number",
563"step": 1,
564"optional": true,
565"additionalParams": true,
566"id": "chatOpenAI_LlamaIndex_1-input-timeout-number"
567}
568],
569"inputAnchors": [],
570"inputs": {
571"modelName": "gpt-3.5-turbo-16k",
572"temperature": "0",
573"maxTokens": "",
574"topP": "",
575"timeout": ""
576},
577"outputAnchors": [
578{
579"id": "chatOpenAI_LlamaIndex_1-output-chatOpenAI_LlamaIndex-ChatOpenAI|BaseChatModel_LlamaIndex|BaseLLM",
580"name": "chatOpenAI_LlamaIndex",
581"label": "ChatOpenAI",
582"type": "ChatOpenAI | BaseChatModel_LlamaIndex | BaseLLM"
583}
584],
585"outputs": {},
586"selected": false
587},
588"selected": false,
589"positionAbsolute": {
590"x": -446.80851289432655,
591"y": 246.8790997755625
592},
593"dragging": false
594},
595{
596"width": 300,
597"height": 334,
598"id": "openAIEmbedding_LlamaIndex_1",
599"position": {
600"x": -37.812177549447284,
601"y": 577.9112529482311
602},
603"type": "customNode",
604"data": {
605"id": "openAIEmbedding_LlamaIndex_1",
606"label": "OpenAI Embedding",
607"version": 2,
608"name": "openAIEmbedding_LlamaIndex",
609"type": "OpenAIEmbedding",
610"baseClasses": ["OpenAIEmbedding", "BaseEmbedding_LlamaIndex", "BaseEmbedding"],
611"tags": ["LlamaIndex"],
612"category": "Embeddings",
613"description": "OpenAI Embedding specific for LlamaIndex",
614"inputParams": [
615{
616"label": "Connect Credential",
617"name": "credential",
618"type": "credential",
619"credentialNames": ["openAIApi"],
620"id": "openAIEmbedding_LlamaIndex_1-input-credential-credential"
621},
622{
623"label": "Model Name",
624"name": "modelName",
625"type": "asyncOptions",
626"loadMethod": "listModels",
627"default": "text-embedding-ada-002",
628"id": "openAIEmbedding_LlamaIndex_1-input-modelName-options"
629},
630{
631"label": "Timeout",
632"name": "timeout",
633"type": "number",
634"optional": true,
635"additionalParams": true,
636"id": "openAIEmbedding_LlamaIndex_1-input-timeout-number"
637},
638{
639"label": "BasePath",
640"name": "basepath",
641"type": "string",
642"optional": true,
643"additionalParams": true,
644"id": "openAIEmbedding_LlamaIndex_1-input-basepath-string"
645}
646],
647"inputAnchors": [],
648"inputs": {
649"timeout": "",
650"basepath": "",
651"modelName": "text-embedding-ada-002"
652},
653"outputAnchors": [
654{
655"id": "openAIEmbedding_LlamaIndex_1-output-openAIEmbedding_LlamaIndex-OpenAIEmbedding|BaseEmbedding_LlamaIndex|BaseEmbedding",
656"name": "openAIEmbedding_LlamaIndex",
657"label": "OpenAIEmbedding",
658"type": "OpenAIEmbedding | BaseEmbedding_LlamaIndex | BaseEmbedding"
659}
660],
661"outputs": {},
662"selected": false
663},
664"selected": false,
665"dragging": false,
666"positionAbsolute": {
667"x": -37.812177549447284,
668"y": 577.9112529482311
669}
670},
671{
672"width": 300,
673"height": 382,
674"id": "queryEngine_0",
675"position": {
676"x": 416.2466817793368,
677"y": -600.1335182096643
678},
679"type": "customNode",
680"data": {
681"id": "queryEngine_0",
682"label": "Query Engine",
683"version": 2,
684"name": "queryEngine",
685"type": "QueryEngine",
686"baseClasses": ["QueryEngine", "BaseQueryEngine"],
687"tags": ["LlamaIndex"],
688"category": "Engine",
689"description": "Simple query engine built to answer question over your data, without memory",
690"inputParams": [
691{
692"label": "Return Source Documents",
693"name": "returnSourceDocuments",
694"type": "boolean",
695"optional": true,
696"id": "queryEngine_0-input-returnSourceDocuments-boolean"
697}
698],
699"inputAnchors": [
700{
701"label": "Vector Store Retriever",
702"name": "vectorStoreRetriever",
703"type": "VectorIndexRetriever",
704"id": "queryEngine_0-input-vectorStoreRetriever-VectorIndexRetriever"
705},
706{
707"label": "Response Synthesizer",
708"name": "responseSynthesizer",
709"type": "ResponseSynthesizer",
710"description": "ResponseSynthesizer is responsible for sending the query, nodes, and prompt templates to the LLM to generate a response. See <a target=\"_blank\" href=\"https://ts.llamaindex.ai/modules/low_level/response_synthesizer\">more</a>",
711"optional": true,
712"id": "queryEngine_0-input-responseSynthesizer-ResponseSynthesizer"
713}
714],
715"inputs": {
716"vectorStoreRetriever": "{{pineconeLlamaIndex_1.data.instance}}",
717"responseSynthesizer": "",
718"returnSourceDocuments": ""
719},
720"outputAnchors": [
721{
722"id": "queryEngine_0-output-queryEngine-QueryEngine|BaseQueryEngine",
723"name": "queryEngine",
724"label": "QueryEngine",
725"description": "Simple query engine built to answer question over your data, without memory",
726"type": "QueryEngine | BaseQueryEngine"
727}
728],
729"outputs": {},
730"selected": false
731},
732"selected": false,
733"positionAbsolute": {
734"x": 416.2466817793368,
735"y": -600.1335182096643
736},
737"dragging": false
738},
739{
740"width": 300,
741"height": 511,
742"id": "queryEngineToolLlamaIndex_2",
743"position": {
744"x": 766.9839000102993,
745"y": -654.6926410455919
746},
747"type": "customNode",
748"data": {
749"id": "queryEngineToolLlamaIndex_2",
750"label": "QueryEngine Tool",
751"version": 2,
752"name": "queryEngineToolLlamaIndex",
753"type": "QueryEngineTool",
754"baseClasses": ["QueryEngineTool"],
755"tags": ["LlamaIndex"],
756"category": "Tools",
757"description": "Tool used to invoke query engine",
758"inputParams": [
759{
760"label": "Tool Name",
761"name": "toolName",
762"type": "string",
"description": "Tool name must be in lowercase with underscores. Ex: my_tool",
764"id": "queryEngineToolLlamaIndex_2-input-toolName-string"
765},
766{
767"label": "Tool Description",
768"name": "toolDesc",
769"type": "string",
770"rows": 4,
771"id": "queryEngineToolLlamaIndex_2-input-toolDesc-string"
772}
773],
774"inputAnchors": [
775{
776"label": "Base QueryEngine",
777"name": "baseQueryEngine",
778"type": "BaseQueryEngine",
779"id": "queryEngineToolLlamaIndex_2-input-baseQueryEngine-BaseQueryEngine"
780}
781],
782"inputs": {
783"baseQueryEngine": "{{queryEngine_0.data.instance}}",
784"toolName": "apple_tool",
"toolDesc": "A SEC Form 10K filing describing the financials of Apple Inc (AAPL) for the 2022 time period."
786},
787"outputAnchors": [
788{
789"id": "queryEngineToolLlamaIndex_2-output-queryEngineToolLlamaIndex-QueryEngineTool",
790"name": "queryEngineToolLlamaIndex",
791"label": "QueryEngineTool",
792"description": "Tool used to invoke query engine",
793"type": "QueryEngineTool"
794}
795],
796"outputs": {},
797"selected": false
798},
799"selected": false,
800"positionAbsolute": {
801"x": 766.9839000102993,
802"y": -654.6926410455919
803},
804"dragging": false
805},
806{
807"width": 300,
808"height": 511,
809"id": "queryEngineToolLlamaIndex_1",
810"position": {
811"x": 771.5434180813253,
812"y": -109.03650423344013
813},
814"type": "customNode",
815"data": {
816"id": "queryEngineToolLlamaIndex_1",
817"label": "QueryEngine Tool",
818"version": 2,
819"name": "queryEngineToolLlamaIndex",
820"type": "QueryEngineTool",
821"baseClasses": ["QueryEngineTool"],
822"tags": ["LlamaIndex"],
823"category": "Tools",
824"description": "Tool used to invoke query engine",
825"inputParams": [
826{
827"label": "Tool Name",
828"name": "toolName",
829"type": "string",
"description": "Tool name must be in lowercase with underscores. Ex: my_tool",
831"id": "queryEngineToolLlamaIndex_1-input-toolName-string"
832},
833{
834"label": "Tool Description",
835"name": "toolDesc",
836"type": "string",
837"rows": 4,
838"id": "queryEngineToolLlamaIndex_1-input-toolDesc-string"
839}
840],
841"inputAnchors": [
842{
843"label": "Base QueryEngine",
844"name": "baseQueryEngine",
845"type": "BaseQueryEngine",
846"id": "queryEngineToolLlamaIndex_1-input-baseQueryEngine-BaseQueryEngine"
847}
848],
849"inputs": {
850"baseQueryEngine": "{{queryEngine_1.data.instance}}",
851"toolName": "tesla_tool",
852"toolDesc": "A SEC Form 10K filing describing the financials of Tesla Inc (TSLA) for the 2022 time period."
853},
854"outputAnchors": [
855{
856"id": "queryEngineToolLlamaIndex_1-output-queryEngineToolLlamaIndex-QueryEngineTool",
857"name": "queryEngineToolLlamaIndex",
858"label": "QueryEngineTool",
859"description": "Tool used to invoke query engine",
860"type": "QueryEngineTool"
861}
862],
863"outputs": {},
864"selected": false
865},
866"selected": false,
867"positionAbsolute": {
868"x": 771.5434180813253,
869"y": -109.03650423344013
870},
871"dragging": false
872},
873{
874"width": 300,
875"height": 382,
876"id": "queryEngine_1",
877"position": {
878"x": 411.8632262885343,
879"y": -68.91392354277994
880},
881"type": "customNode",
882"data": {
883"id": "queryEngine_1",
884"label": "Query Engine",
885"version": 2,
886"name": "queryEngine",
887"type": "QueryEngine",
888"baseClasses": ["QueryEngine", "BaseQueryEngine"],
889"tags": ["LlamaIndex"],
890"category": "Engine",
891"description": "Simple query engine built to answer question over your data, without memory",
892"inputParams": [
893{
894"label": "Return Source Documents",
895"name": "returnSourceDocuments",
896"type": "boolean",
897"optional": true,
898"id": "queryEngine_1-input-returnSourceDocuments-boolean"
899}
900],
901"inputAnchors": [
902{
903"label": "Vector Store Retriever",
904"name": "vectorStoreRetriever",
905"type": "VectorIndexRetriever",
906"id": "queryEngine_1-input-vectorStoreRetriever-VectorIndexRetriever"
907},
908{
909"label": "Response Synthesizer",
910"name": "responseSynthesizer",
911"type": "ResponseSynthesizer",
912"description": "ResponseSynthesizer is responsible for sending the query, nodes, and prompt templates to the LLM to generate a response. See <a target=\"_blank\" href=\"https://ts.llamaindex.ai/modules/low_level/response_synthesizer\">more</a>",
913"optional": true,
914"id": "queryEngine_1-input-responseSynthesizer-ResponseSynthesizer"
915}
916],
917"inputs": {
918"vectorStoreRetriever": "{{pineconeLlamaIndex_0.data.instance}}",
919"responseSynthesizer": "",
920"returnSourceDocuments": ""
921},
922"outputAnchors": [
923{
924"id": "queryEngine_1-output-queryEngine-QueryEngine|BaseQueryEngine",
925"name": "queryEngine",
926"label": "QueryEngine",
927"description": "Simple query engine built to answer question over your data, without memory",
928"type": "QueryEngine | BaseQueryEngine"
929}
930],
931"outputs": {},
932"selected": false
933},
934"selected": false,
935"positionAbsolute": {
936"x": 411.8632262885343,
937"y": -68.91392354277994
938},
939"dragging": false
940},
941{
942"width": 300,
943"height": 484,
944"id": "subQuestionQueryEngine_0",
945"position": {
946"x": 1204.489328490966,
947"y": 347.2090726754211
948},
949"type": "customNode",
950"data": {
951"id": "subQuestionQueryEngine_0",
952"label": "Sub Question Query Engine",
953"version": 2,
954"name": "subQuestionQueryEngine",
955"type": "SubQuestionQueryEngine",
956"baseClasses": ["SubQuestionQueryEngine", "BaseQueryEngine"],
957"tags": ["LlamaIndex"],
958"category": "Engine",
"description": "Breaks complex query into sub questions for each relevant data source, then gathers all the intermediate responses and synthesizes a final response",
960"inputParams": [
961{
962"label": "Return Source Documents",
963"name": "returnSourceDocuments",
964"type": "boolean",
965"optional": true,
966"id": "subQuestionQueryEngine_0-input-returnSourceDocuments-boolean"
967}
968],
969"inputAnchors": [
970{
971"label": "QueryEngine Tools",
972"name": "queryEngineTools",
973"type": "QueryEngineTool",
974"list": true,
975"id": "subQuestionQueryEngine_0-input-queryEngineTools-QueryEngineTool"
976},
977{
978"label": "Chat Model",
979"name": "model",
980"type": "BaseChatModel_LlamaIndex",
981"id": "subQuestionQueryEngine_0-input-model-BaseChatModel_LlamaIndex"
982},
983{
984"label": "Embeddings",
985"name": "embeddings",
986"type": "BaseEmbedding_LlamaIndex",
987"id": "subQuestionQueryEngine_0-input-embeddings-BaseEmbedding_LlamaIndex"
988},
989{
990"label": "Response Synthesizer",
991"name": "responseSynthesizer",
992"type": "ResponseSynthesizer",
993"description": "ResponseSynthesizer is responsible for sending the query, nodes, and prompt templates to the LLM to generate a response. See <a target=\"_blank\" href=\"https://ts.llamaindex.ai/modules/low_level/response_synthesizer\">more</a>",
994"optional": true,
995"id": "subQuestionQueryEngine_0-input-responseSynthesizer-ResponseSynthesizer"
996}
997],
998"inputs": {
999"queryEngineTools": ["{{queryEngineToolLlamaIndex_2.data.instance}}", "{{queryEngineToolLlamaIndex_1.data.instance}}"],
1000"model": "{{chatOpenAI_LlamaIndex_1.data.instance}}",
1001"embeddings": "{{openAIEmbedding_LlamaIndex_1.data.instance}}",
1002"responseSynthesizer": "{{compactrefineLlamaIndex_0.data.instance}}",
1003"returnSourceDocuments": true
1004},
1005"outputAnchors": [
1006{
1007"id": "subQuestionQueryEngine_0-output-subQuestionQueryEngine-SubQuestionQueryEngine|BaseQueryEngine",
1008"name": "subQuestionQueryEngine",
1009"label": "SubQuestionQueryEngine",
"description": "Breaks complex query into sub questions for each relevant data source, then gathers all the intermediate responses and synthesizes a final response",
1011"type": "SubQuestionQueryEngine | BaseQueryEngine"
1012}
1013],
1014"outputs": {},
1015"selected": false
1016},
1017"selected": false,
1018"positionAbsolute": {
1019"x": 1204.489328490966,
1020"y": 347.2090726754211
1021},
1022"dragging": false
1023},
1024{
1025"width": 300,
1026"height": 82,
1027"id": "stickyNote_0",
1028"position": {
1029"x": 1208.1786832265154,
1030"y": 238.26647262900994
1031},
1032"type": "stickyNote",
1033"data": {
1034"id": "stickyNote_0",
1035"label": "Sticky Note",
1036"version": 1,
1037"name": "stickyNote",
1038"type": "StickyNote",
1039"baseClasses": ["StickyNote"],
1040"category": "Utilities",
1041"description": "Add a sticky note",
1042"inputParams": [
1043{
1044"label": "",
1045"name": "note",
1046"type": "string",
1047"rows": 1,
1048"placeholder": "Type something here",
1049"optional": true,
1050"id": "stickyNote_0-input-note-string"
1051}
1052],
1053"inputAnchors": [],
1054"inputs": {
1055"note": "Break questions into subqueries, then retrieve corresponding context using queryengine tools"
1056},
1057"outputAnchors": [
1058{
1059"id": "stickyNote_0-output-stickyNote-StickyNote",
1060"name": "stickyNote",
1061"label": "StickyNote",
1062"description": "Add a sticky note",
1063"type": "StickyNote"
1064}
1065],
1066"outputs": {},
1067"selected": false
1068},
1069"selected": false,
1070"positionAbsolute": {
1071"x": 1208.1786832265154,
1072"y": 238.26647262900994
1073},
1074"dragging": false
1075},
1076{
1077"width": 300,
1078"height": 82,
1079"id": "stickyNote_1",
1080"position": {
1081"x": 416.8958270395809,
1082"y": -179.9680840754678
1083},
1084"type": "stickyNote",
1085"data": {
1086"id": "stickyNote_1",
1087"label": "Sticky Note",
1088"version": 1,
1089"name": "stickyNote",
1090"type": "StickyNote",
1091"baseClasses": ["StickyNote"],
1092"category": "Utilities",
1093"description": "Add a sticky note",
1094"inputParams": [
1095{
1096"label": "",
1097"name": "note",
1098"type": "string",
1099"rows": 1,
1100"placeholder": "Type something here",
1101"optional": true,
1102"id": "stickyNote_1-input-note-string"
1103}
1104],
1105"inputAnchors": [],
1106"inputs": {
1107"note": "Query previously upserted documents with corresponding metadata key value pair - \n{ source: \"<company>\"}"
1108},
1109"outputAnchors": [
1110{
1111"id": "stickyNote_1-output-stickyNote-StickyNote",
1112"name": "stickyNote",
1113"label": "StickyNote",
1114"description": "Add a sticky note",
1115"type": "StickyNote"
1116}
1117],
1118"outputs": {},
1119"selected": false
1120},
1121"selected": false,
1122"positionAbsolute": {
1123"x": 416.8958270395809,
1124"y": -179.9680840754678
1125},
1126"dragging": false
1127}
1128],
1129"edges": [
1130{
1131"source": "chatOpenAI_LlamaIndex_0",
1132"sourceHandle": "chatOpenAI_LlamaIndex_0-output-chatOpenAI_LlamaIndex-ChatOpenAI|BaseChatModel_LlamaIndex|BaseLLM",
1133"target": "pineconeLlamaIndex_1",
1134"targetHandle": "pineconeLlamaIndex_1-input-model-BaseChatModel_LlamaIndex",
1135"type": "buttonedge",
1136"id": "chatOpenAI_LlamaIndex_0-chatOpenAI_LlamaIndex_0-output-chatOpenAI_LlamaIndex-ChatOpenAI|BaseChatModel_LlamaIndex|BaseLLM-pineconeLlamaIndex_1-pineconeLlamaIndex_1-input-model-BaseChatModel_LlamaIndex"
1137},
1138{
1139"source": "openAIEmbedding_LlamaIndex_0",
1140"sourceHandle": "openAIEmbedding_LlamaIndex_0-output-openAIEmbedding_LlamaIndex-OpenAIEmbedding|BaseEmbedding_LlamaIndex|BaseEmbedding",
1141"target": "pineconeLlamaIndex_1",
1142"targetHandle": "pineconeLlamaIndex_1-input-embeddings-BaseEmbedding_LlamaIndex",
1143"type": "buttonedge",
1144"id": "openAIEmbedding_LlamaIndex_0-openAIEmbedding_LlamaIndex_0-output-openAIEmbedding_LlamaIndex-OpenAIEmbedding|BaseEmbedding_LlamaIndex|BaseEmbedding-pineconeLlamaIndex_1-pineconeLlamaIndex_1-input-embeddings-BaseEmbedding_LlamaIndex"
1145},
1146{
1147"source": "openAIEmbedding_LlamaIndex_0",
1148"sourceHandle": "openAIEmbedding_LlamaIndex_0-output-openAIEmbedding_LlamaIndex-OpenAIEmbedding|BaseEmbedding_LlamaIndex|BaseEmbedding",
1149"target": "pineconeLlamaIndex_0",
1150"targetHandle": "pineconeLlamaIndex_0-input-embeddings-BaseEmbedding_LlamaIndex",
1151"type": "buttonedge",
1152"id": "openAIEmbedding_LlamaIndex_0-openAIEmbedding_LlamaIndex_0-output-openAIEmbedding_LlamaIndex-OpenAIEmbedding|BaseEmbedding_LlamaIndex|BaseEmbedding-pineconeLlamaIndex_0-pineconeLlamaIndex_0-input-embeddings-BaseEmbedding_LlamaIndex"
1153},
1154{
1155"source": "chatOpenAI_LlamaIndex_0",
1156"sourceHandle": "chatOpenAI_LlamaIndex_0-output-chatOpenAI_LlamaIndex-ChatOpenAI|BaseChatModel_LlamaIndex|BaseLLM",
1157"target": "pineconeLlamaIndex_0",
1158"targetHandle": "pineconeLlamaIndex_0-input-model-BaseChatModel_LlamaIndex",
1159"type": "buttonedge",
1160"id": "chatOpenAI_LlamaIndex_0-chatOpenAI_LlamaIndex_0-output-chatOpenAI_LlamaIndex-ChatOpenAI|BaseChatModel_LlamaIndex|BaseLLM-pineconeLlamaIndex_0-pineconeLlamaIndex_0-input-model-BaseChatModel_LlamaIndex"
1161},
1162{
1163"source": "pineconeLlamaIndex_1",
1164"sourceHandle": "pineconeLlamaIndex_1-output-retriever-Pinecone|VectorIndexRetriever",
1165"target": "queryEngine_0",
1166"targetHandle": "queryEngine_0-input-vectorStoreRetriever-VectorIndexRetriever",
1167"type": "buttonedge",
1168"id": "pineconeLlamaIndex_1-pineconeLlamaIndex_1-output-retriever-Pinecone|VectorIndexRetriever-queryEngine_0-queryEngine_0-input-vectorStoreRetriever-VectorIndexRetriever"
1169},
1170{
1171"source": "queryEngine_0",
1172"sourceHandle": "queryEngine_0-output-queryEngine-QueryEngine|BaseQueryEngine",
1173"target": "queryEngineToolLlamaIndex_2",
1174"targetHandle": "queryEngineToolLlamaIndex_2-input-baseQueryEngine-BaseQueryEngine",
1175"type": "buttonedge",
1176"id": "queryEngine_0-queryEngine_0-output-queryEngine-QueryEngine|BaseQueryEngine-queryEngineToolLlamaIndex_2-queryEngineToolLlamaIndex_2-input-baseQueryEngine-BaseQueryEngine"
1177},
1178{
1179"source": "pineconeLlamaIndex_0",
1180"sourceHandle": "pineconeLlamaIndex_0-output-retriever-Pinecone|VectorIndexRetriever",
1181"target": "queryEngine_1",
1182"targetHandle": "queryEngine_1-input-vectorStoreRetriever-VectorIndexRetriever",
1183"type": "buttonedge",
1184"id": "pineconeLlamaIndex_0-pineconeLlamaIndex_0-output-retriever-Pinecone|VectorIndexRetriever-queryEngine_1-queryEngine_1-input-vectorStoreRetriever-VectorIndexRetriever"
1185},
1186{
1187"source": "queryEngine_1",
1188"sourceHandle": "queryEngine_1-output-queryEngine-QueryEngine|BaseQueryEngine",
1189"target": "queryEngineToolLlamaIndex_1",
1190"targetHandle": "queryEngineToolLlamaIndex_1-input-baseQueryEngine-BaseQueryEngine",
1191"type": "buttonedge",
1192"id": "queryEngine_1-queryEngine_1-output-queryEngine-QueryEngine|BaseQueryEngine-queryEngineToolLlamaIndex_1-queryEngineToolLlamaIndex_1-input-baseQueryEngine-BaseQueryEngine"
1193},
1194{
1195"source": "queryEngineToolLlamaIndex_2",
1196"sourceHandle": "queryEngineToolLlamaIndex_2-output-queryEngineToolLlamaIndex-QueryEngineTool",
1197"target": "subQuestionQueryEngine_0",
1198"targetHandle": "subQuestionQueryEngine_0-input-queryEngineTools-QueryEngineTool",
1199"type": "buttonedge",
1200"id": "queryEngineToolLlamaIndex_2-queryEngineToolLlamaIndex_2-output-queryEngineToolLlamaIndex-QueryEngineTool-subQuestionQueryEngine_0-subQuestionQueryEngine_0-input-queryEngineTools-QueryEngineTool"
1201},
1202{
1203"source": "queryEngineToolLlamaIndex_1",
1204"sourceHandle": "queryEngineToolLlamaIndex_1-output-queryEngineToolLlamaIndex-QueryEngineTool",
1205"target": "subQuestionQueryEngine_0",
1206"targetHandle": "subQuestionQueryEngine_0-input-queryEngineTools-QueryEngineTool",
1207"type": "buttonedge",
1208"id": "queryEngineToolLlamaIndex_1-queryEngineToolLlamaIndex_1-output-queryEngineToolLlamaIndex-QueryEngineTool-subQuestionQueryEngine_0-subQuestionQueryEngine_0-input-queryEngineTools-QueryEngineTool"
1209},
1210{
1211"source": "chatOpenAI_LlamaIndex_1",
1212"sourceHandle": "chatOpenAI_LlamaIndex_1-output-chatOpenAI_LlamaIndex-ChatOpenAI|BaseChatModel_LlamaIndex|BaseLLM",
1213"target": "subQuestionQueryEngine_0",
1214"targetHandle": "subQuestionQueryEngine_0-input-model-BaseChatModel_LlamaIndex",
1215"type": "buttonedge",
1216"id": "chatOpenAI_LlamaIndex_1-chatOpenAI_LlamaIndex_1-output-chatOpenAI_LlamaIndex-ChatOpenAI|BaseChatModel_LlamaIndex|BaseLLM-subQuestionQueryEngine_0-subQuestionQueryEngine_0-input-model-BaseChatModel_LlamaIndex"
1217},
1218{
1219"source": "openAIEmbedding_LlamaIndex_1",
1220"sourceHandle": "openAIEmbedding_LlamaIndex_1-output-openAIEmbedding_LlamaIndex-OpenAIEmbedding|BaseEmbedding_LlamaIndex|BaseEmbedding",
1221"target": "subQuestionQueryEngine_0",
1222"targetHandle": "subQuestionQueryEngine_0-input-embeddings-BaseEmbedding_LlamaIndex",
1223"type": "buttonedge",
1224"id": "openAIEmbedding_LlamaIndex_1-openAIEmbedding_LlamaIndex_1-output-openAIEmbedding_LlamaIndex-OpenAIEmbedding|BaseEmbedding_LlamaIndex|BaseEmbedding-subQuestionQueryEngine_0-subQuestionQueryEngine_0-input-embeddings-BaseEmbedding_LlamaIndex"
1225},
1226{
1227"source": "compactrefineLlamaIndex_0",
1228"sourceHandle": "compactrefineLlamaIndex_0-output-compactrefineLlamaIndex-CompactRefine|ResponseSynthesizer",
1229"target": "subQuestionQueryEngine_0",
1230"targetHandle": "subQuestionQueryEngine_0-input-responseSynthesizer-ResponseSynthesizer",
1231"type": "buttonedge",
1232"id": "compactrefineLlamaIndex_0-compactrefineLlamaIndex_0-output-compactrefineLlamaIndex-CompactRefine|ResponseSynthesizer-subQuestionQueryEngine_0-subQuestionQueryEngine_0-input-responseSynthesizer-ResponseSynthesizer"
1233}
1234]
1235}
1236