diff --git "a/all_docs.jsonl" "b/all_docs.jsonl" new file mode 100644--- /dev/null +++ "b/all_docs.jsonl" @@ -0,0 +1,465 @@ +[] +[] +[{"title": "Welcome to LlamaIndex \ud83e\udd99 !", "text": "", "num_tokens": 805}] +[{"title": "Contributing to LlamaIndex", "text": "", "num_tokens": 801}, {"title": "Contributing to LlamaIndex", "text": "", "num_tokens": 802}, {"title": "Contributing to LlamaIndex", "text": "", "num_tokens": 802}] +[] +[{"title": "ChangeLog", "text": "", "num_tokens": 807}, {"title": "ChangeLog", "text": "", "num_tokens": 803}, {"title": "ChangeLog", "text": "", "num_tokens": 804}, {"title": "ChangeLog", "text": "", "num_tokens": 805}, {"title": "ChangeLog", "text": "", "num_tokens": 809}, {"title": "ChangeLog", "text": "", "num_tokens": 802}, {"title": "ChangeLog", "text": "", "num_tokens": 805}, {"title": "ChangeLog", "text": "", "num_tokens": 809}, {"title": "ChangeLog", "text": "", "num_tokens": 805}, {"title": "ChangeLog", "text": "", "num_tokens": 803}, {"title": "ChangeLog", "text": "", "num_tokens": 809}, {"title": "ChangeLog", "text": "", "num_tokens": 804}, {"title": "ChangeLog", "text": "", "num_tokens": 807}, {"title": "ChangeLog", "text": "", "num_tokens": 808}] +[] +[{"title": "LLMs", "text": "", "num_tokens": 831}, {"title": "LLMs", "text": "", "num_tokens": 802}, {"title": "LLMs", "text": "", "num_tokens": 801}, {"title": "LLMs", "text": "", "num_tokens": 802}, {"title": "LLMs", "text": "", "num_tokens": 801}, {"title": "LLMs", "text": "", "num_tokens": 807}] +[] +[{"title": "Prompt Templates", "text": "", "num_tokens": 804}, {"title": "Prompt Templates", "text": "", "num_tokens": 812}, {"title": "Prompt Templates", "text": "", "num_tokens": 807}] +[{"title": "Callbacks", "text": "", "num_tokens": 805}, {"title": "Callbacks", "text": "", "num_tokens": 809}, {"title": "Callbacks", "text": "", "num_tokens": 811}, {"title": "Callbacks", "text": "", "num_tokens": 807}, {"title": "Callbacks", "text": "", "num_tokens": 805}, {"title": "Callbacks", "text": "", "num_tokens": 813}, {"title": "Callbacks", "text": "", "num_tokens": 804}, {"title": "Callbacks", "text": "", "num_tokens": 813}, {"title": "Callbacks", "text": "", "num_tokens": 805}, {"title": "Callbacks", "text": "", "num_tokens": 811}, {"title": "Callbacks", "text": "", "num_tokens": 813}] +[{"title": "LLM Predictors", "text": "", "num_tokens": 806}, {"title": "LLM Predictors", "text": "", "num_tokens": 823}, {"title": "LLM Predictors", "text": "", "num_tokens": 821}] +[] +[{"title": "Evaluation", "text": "", "num_tokens": 818}, {"title": "Evaluation", "text": "", "num_tokens": 803}, {"title": "Evaluation", "text": "", "num_tokens": 809}, {"title": "Evaluation", "text": "", "num_tokens": 810}, {"title": "Evaluation", "text": "", "num_tokens": 801}, {"title": "Evaluation", "text": "", "num_tokens": 802}, {"title": "Evaluation", "text": "", "num_tokens": 855}, {"title": "Evaluation", "text": "", "num_tokens": 826}, {"title": "Evaluation", "text": "", "num_tokens": 810}, {"title": "Evaluation", "text": "", "num_tokens": 831}, {"title": "Evaluation", "text": "", "num_tokens": 861}, {"title": "Evaluation", "text": "", "num_tokens": 873}, {"title": "Evaluation", "text": "", "num_tokens": 814}, {"title": "Evaluation", "text": "", "num_tokens": 801}, {"title": "Evaluation", "text": "", "num_tokens": 868}, {"title": "Evaluation", "text": "", "num_tokens": 868}] +[] +[] +[{"title": "Indices", "text": "", "num_tokens": 808}] +[{"title": "Memory", "text": "", "num_tokens": 813}, {"title": "Memory", "text": "", 
"num_tokens": 804}] +[{"title": "Node Postprocessor", "text": "", "num_tokens": 803}, {"title": "Node Postprocessor", "text": "", "num_tokens": 871}, {"title": "Node Postprocessor", "text": "", "num_tokens": 815}, {"title": "Node Postprocessor", "text": "", "num_tokens": 813}, {"title": "Node Postprocessor", "text": "", "num_tokens": 855}, {"title": "Node Postprocessor", "text": "", "num_tokens": 802}, {"title": "Node Postprocessor", "text": "", "num_tokens": 880}, {"title": "Node Postprocessor", "text": "", "num_tokens": 803}, {"title": "Node Postprocessor", "text": "", "num_tokens": 807}, {"title": "Node Postprocessor", "text": "", "num_tokens": 820}, {"title": "Node Postprocessor", "text": "", "num_tokens": 812}, {"title": "Node Postprocessor", "text": "", "num_tokens": 803}, {"title": "Node Postprocessor", "text": "", "num_tokens": 828}, {"title": "Node Postprocessor", "text": "", "num_tokens": 805}, {"title": "Node Postprocessor", "text": "", "num_tokens": 958}, {"title": "Node Postprocessor", "text": "", "num_tokens": 848}, {"title": "Node Postprocessor", "text": "", "num_tokens": 802}, {"title": "Node Postprocessor", "text": "", "num_tokens": 811}, {"title": "Node Postprocessor", "text": "", "num_tokens": 809}, {"title": "Node Postprocessor", "text": "", "num_tokens": 810}, {"title": "Node Postprocessor", "text": "", "num_tokens": 837}, {"title": "Node Postprocessor", "text": "", "num_tokens": 879}, {"title": "Node Postprocessor", "text": "", "num_tokens": 808}, {"title": "Node Postprocessor", "text": "", "num_tokens": 828}] +[{"title": "Finetuning", "text": "", "num_tokens": 829}, {"title": "Finetuning", "text": "", "num_tokens": 812}] +[{"title": "Structured Index Configuration", "text": "", "num_tokens": 805}, {"title": "Structured Index Configuration", "text": "", "num_tokens": 813}] +[{"title": "Node", "text": "", "num_tokens": 817}, {"title": "Node", "text": "", "num_tokens": 803}, {"title": "Node", "text": "", "num_tokens": 809}, {"title": "Node", "text": "", "num_tokens": 803}, {"title": "Node", "text": "", "num_tokens": 814}, {"title": "Node", "text": "", "num_tokens": 801}, {"title": "Node", "text": "", "num_tokens": 808}, {"title": "Node", "text": "", "num_tokens": 804}, {"title": "Node", "text": "", "num_tokens": 812}, {"title": "Node", "text": "", "num_tokens": 802}, {"title": "Node", "text": "", "num_tokens": 809}, {"title": "Node", "text": "", "num_tokens": 801}, {"title": "Node", "text": "", "num_tokens": 805}, {"title": "Node", "text": "", "num_tokens": 810}, {"title": "Node", "text": "", "num_tokens": 817}, {"title": "Node", "text": "", "num_tokens": 804}, {"title": "Node", "text": "", "num_tokens": 801}, {"title": "Node", "text": "", "num_tokens": 806}, {"title": "Node", "text": "", "num_tokens": 886}, {"title": "Node", "text": "", "num_tokens": 807}, {"title": "Node", "text": "", "num_tokens": 803}, {"title": "Node", "text": "", "num_tokens": 802}, {"title": "Node", "text": "", "num_tokens": 811}, {"title": "Node", "text": "", "num_tokens": 810}] +[] +[{"title": "Data Connectors", "text": "", "num_tokens": 881}, {"title": "Data Connectors", "text": "", "num_tokens": 801}, {"title": "Data Connectors", "text": "", "num_tokens": 803}, {"title": "Data Connectors", "text": "", "num_tokens": 908}, {"title": "Data Connectors", "text": "", "num_tokens": 808}, {"title": "Data Connectors", "text": "", "num_tokens": 811}, {"title": "Data Connectors", "text": "", "num_tokens": 823}, {"title": "Data Connectors", "text": "", "num_tokens": 811}, {"title": "Data Connectors", 
"text": "", "num_tokens": 830}, {"title": "Data Connectors", "text": "", "num_tokens": 810}, {"title": "Data Connectors", "text": "", "num_tokens": 801}, {"title": "Data Connectors", "text": "", "num_tokens": 830}, {"title": "Data Connectors", "text": "", "num_tokens": 833}, {"title": "Data Connectors", "text": "", "num_tokens": 801}, {"title": "Data Connectors", "text": "", "num_tokens": 807}, {"title": "Data Connectors", "text": "", "num_tokens": 802}, {"title": "Data Connectors", "text": "", "num_tokens": 837}, {"title": "Data Connectors", "text": "", "num_tokens": 805}, {"title": "Data Connectors", "text": "", "num_tokens": 823}, {"title": "Data Connectors", "text": "", "num_tokens": 815}, {"title": "Data Connectors", "text": "", "num_tokens": 809}, {"title": "Data Connectors", "text": "", "num_tokens": 892}, {"title": "Data Connectors", "text": "", "num_tokens": 808}, {"title": "Data Connectors", "text": "", "num_tokens": 807}, {"title": "Data Connectors", "text": "", "num_tokens": 804}, {"title": "Data Connectors", "text": "", "num_tokens": 861}, {"title": "Data Connectors", "text": "", "num_tokens": 814}, {"title": "Data Connectors", "text": "", "num_tokens": 813}, {"title": "Data Connectors", "text": "", "num_tokens": 804}, {"title": "Data Connectors", "text": "", "num_tokens": 808}, {"title": "Data Connectors", "text": "", "num_tokens": 803}] +[{"title": "Service Context", "text": "", "num_tokens": 959}] +[] +[] +[{"title": "Knowledge Graph Index", "text": "", "num_tokens": 811}, {"title": "Knowledge Graph Index", "text": "", "num_tokens": 806}, {"title": "Knowledge Graph Index", "text": "", "num_tokens": 803}] +[{"title": "Empty Index", "text": "", "num_tokens": 802}] +[{"title": "Summary Index", "text": "", "num_tokens": 804}, {"title": "Summary Index", "text": "", "num_tokens": 807}] +[{"title": "Vector Store Index", "text": "", "num_tokens": 816}] +[{"title": "Table Index", "text": "", "num_tokens": 805}, {"title": "Table Index", "text": "", "num_tokens": 807}, {"title": "Table Index", "text": "", "num_tokens": 807}, {"title": "Table Index", "text": "", "num_tokens": 821}] +[{"title": "Structured Store Index", "text": "", "num_tokens": 813}, {"title": "Structured Store Index", "text": "", "num_tokens": 803}, {"title": "Structured Store Index", "text": "", "num_tokens": 803}, {"title": "Structured Store Index", "text": "", "num_tokens": 803}] +[{"title": "Tree Index", "text": "", "num_tokens": 824}, {"title": "Tree Index", "text": "", "num_tokens": 863}] +[{"title": "PromptHelper", "text": "", "num_tokens": 802}, {"title": "PromptHelper", "text": "", "num_tokens": 908}] +[{"title": "Node Parser", "text": "", "num_tokens": 801}, {"title": "Node Parser", "text": "", "num_tokens": 811}, {"title": "Node Parser", "text": "", "num_tokens": 806}, {"title": "Node Parser", "text": "", "num_tokens": 804}, {"title": "Node Parser", "text": "", "num_tokens": 802}, {"title": "Node Parser", "text": "", "num_tokens": 808}, {"title": "Node Parser", "text": "", "num_tokens": 862}, {"title": "Node Parser", "text": "", "num_tokens": 802}, {"title": "Node Parser", "text": "", "num_tokens": 811}, {"title": "Node Parser", "text": "", "num_tokens": 804}, {"title": "Node Parser", "text": "", "num_tokens": 809}, {"title": "Node Parser", "text": "", "num_tokens": 802}, {"title": "Node Parser", "text": "", "num_tokens": 885}, {"title": "Node Parser", "text": "", "num_tokens": 811}, {"title": "Node Parser", "text": "", "num_tokens": 802}, {"title": "Node Parser", "text": "", "num_tokens": 803}, {"title": 
"Node Parser", "text": "", "num_tokens": 801}, {"title": "Node Parser", "text": "", "num_tokens": 808}] +[{"title": "Embeddings", "text": "", "num_tokens": 809}, {"title": "Embeddings", "text": "", "num_tokens": 808}, {"title": "Embeddings", "text": "", "num_tokens": 813}, {"title": "Embeddings", "text": "", "num_tokens": 811}] +[{"title": "Langchain Integrations", "text": "", "num_tokens": 805}, {"title": "Langchain Integrations", "text": "", "num_tokens": 809}, {"title": "Langchain Integrations", "text": "", "num_tokens": 895}, {"title": "Langchain Integrations", "text": "", "num_tokens": 806}, {"title": "Langchain Integrations", "text": "", "num_tokens": 814}, {"title": "Langchain Integrations", "text": "", "num_tokens": 876}, {"title": "Langchain Integrations", "text": "", "num_tokens": 813}, {"title": "Langchain Integrations", "text": "", "num_tokens": 809}, {"title": "Langchain Integrations", "text": "", "num_tokens": 805}, {"title": "Langchain Integrations", "text": "", "num_tokens": 801}, {"title": "Langchain Integrations", "text": "", "num_tokens": 849}] +[{"title": "Index Store", "text": "", "num_tokens": 805}] +[{"title": "Document Store", "text": "", "num_tokens": 804}, {"title": "Document Store", "text": "", "num_tokens": 818}, {"title": "Document Store", "text": "", "num_tokens": 804}, {"title": "Document Store", "text": "", "num_tokens": 810}, {"title": "Document Store", "text": "", "num_tokens": 809}] +[{"title": "Vector Store", "text": "", "num_tokens": 806}, {"title": "Vector Store", "text": "", "num_tokens": 843}, {"title": "Vector Store", "text": "", "num_tokens": 805}, {"title": "Vector Store", "text": "", "num_tokens": 809}, {"title": "Vector Store", "text": "", "num_tokens": 803}, {"title": "Vector Store", "text": "", "num_tokens": 813}, {"title": "Vector Store", "text": "", "num_tokens": 807}, {"title": "Vector Store", "text": "", "num_tokens": 810}, {"title": "Vector Store", "text": "", "num_tokens": 830}, {"title": "Vector Store", "text": "", "num_tokens": 804}, {"title": "Vector Store", "text": "", "num_tokens": 807}, {"title": "Vector Store", "text": "", "num_tokens": 844}, {"title": "Vector Store", "text": "", "num_tokens": 838}, {"title": "Vector Store", "text": "", "num_tokens": 801}, {"title": "Vector Store", "text": "", "num_tokens": 804}, {"title": "Vector Store", "text": "", "num_tokens": 806}, {"title": "Vector Store", "text": "", "num_tokens": 809}, {"title": "Vector Store", "text": "", "num_tokens": 803}, {"title": "Vector Store", "text": "", "num_tokens": 805}, {"title": "Vector Store", "text": "", "num_tokens": 811}, {"title": "Vector Store", "text": "", "num_tokens": 811}, {"title": "Vector Store", "text": "", "num_tokens": 807}, {"title": "Vector Store", "text": "", "num_tokens": 806}, {"title": "Vector Store", "text": "", "num_tokens": 804}, {"title": "Vector Store", "text": "", "num_tokens": 807}, {"title": "Vector Store", "text": "", "num_tokens": 808}, {"title": "Vector Store", "text": "", "num_tokens": 809}, {"title": "Vector Store", "text": "", "num_tokens": 804}, {"title": "Vector Store", "text": "", "num_tokens": 801}, {"title": "Vector Store", "text": "", "num_tokens": 807}, {"title": "Vector Store", "text": "", "num_tokens": 819}, {"title": "Vector Store", "text": "", "num_tokens": 807}, {"title": "Vector Store", "text": "", "num_tokens": 805}, {"title": "Vector Store", "text": "", "num_tokens": 815}, {"title": "Vector Store", "text": "", "num_tokens": 804}, {"title": "Vector Store", "text": "", "num_tokens": 809}, {"title": "Vector 
Store", "text": "", "num_tokens": 801}, {"title": "Vector Store", "text": "", "num_tokens": 815}, {"title": "Vector Store", "text": "", "num_tokens": 806}, {"title": "Vector Store", "text": "", "num_tokens": 840}, {"title": "Vector Store", "text": "", "num_tokens": 888}, {"title": "Vector Store", "text": "", "num_tokens": 853}, {"title": "Vector Store", "text": "", "num_tokens": 816}, {"title": "Vector Store", "text": "", "num_tokens": 803}, {"title": "Vector Store", "text": "", "num_tokens": 802}, {"title": "Vector Store", "text": "", "num_tokens": 804}, {"title": "Vector Store", "text": "", "num_tokens": 803}, {"title": "Vector Store", "text": "", "num_tokens": 801}, {"title": "Vector Store", "text": "", "num_tokens": 827}] +[] +[{"title": "KV Storage", "text": "", "num_tokens": 833}] +[{"title": "HuggingFaceLLM", "text": "", "num_tokens": 804}] +[] +[{"title": "Azure OpenAI", "text": "", "num_tokens": 805}] +[] +[] +[{"title": "OpenAI", "text": "", "num_tokens": 807}] +[{"title": "Anthropic", "text": "", "num_tokens": 803}] +[] +[{"title": "LlamaCPP", "text": "", "num_tokens": 807}] +[] +[] +[] +[] +[] +[{"title": "Response Synthesizer", "text": "", "num_tokens": 805}, {"title": "Response Synthesizer", "text": "", "num_tokens": 811}, {"title": "Response Synthesizer", "text": "", "num_tokens": 801}, {"title": "Response Synthesizer", "text": "", "num_tokens": 811}, {"title": "Response Synthesizer", "text": "", "num_tokens": 821}] +[] +[] +[{"title": "Chat Engines", "text": "", "num_tokens": 806}, {"title": "Chat Engines", "text": "", "num_tokens": 805}, {"title": "Chat Engines", "text": "", "num_tokens": 803}, {"title": "Chat Engines", "text": "", "num_tokens": 810}, {"title": "Chat Engines", "text": "", "num_tokens": 801}, {"title": "Chat Engines", "text": "", "num_tokens": 810}, {"title": "Chat Engines", "text": "", "num_tokens": 809}] +[] +[] +[{"title": "Knowledge Graph Retriever", "text": "", "num_tokens": 810}, {"title": "Knowledge Graph Retriever", "text": "", "num_tokens": 808}, {"title": "Knowledge Graph Retriever", "text": "", "num_tokens": 810}, {"title": "Knowledge Graph Retriever", "text": "", "num_tokens": 801}, {"title": "Knowledge Graph Retriever", "text": "", "num_tokens": 803}] +[] +[] +[{"title": "Vector Store Retrievers", "text": "", "num_tokens": 817}, {"title": "Vector Store Retrievers", "text": "", "num_tokens": 808}, {"title": "Vector Store Retrievers", "text": "", "num_tokens": 810}, {"title": "Vector Store Retrievers", "text": "", "num_tokens": 803}, {"title": "Vector Store Retrievers", "text": "", "num_tokens": 864}, {"title": "Vector Store Retrievers", "text": "", "num_tokens": 835}, {"title": "Vector Store Retrievers", "text": "", "num_tokens": 811}, {"title": "Vector Store Retrievers", "text": "", "num_tokens": 804}, {"title": "Vector Store Retrievers", "text": "", "num_tokens": 801}, {"title": "Vector Store Retrievers", "text": "", "num_tokens": 801}, {"title": "Vector Store Retrievers", "text": "", "num_tokens": 807}] +[{"title": "Keyword Table Retrievers", "text": "", "num_tokens": 864}] +[{"title": "Tree Retrievers", "text": "", "num_tokens": 804}] +[] +[] +[] +[] +[] +[] +[] +[{"title": "SQL Query Engine", "text": "", "num_tokens": 838}] +[] +[{"title": "Sub Question Query Engine", "text": "", "num_tokens": 807}, {"title": "Sub Question Query Engine", "text": "", "num_tokens": 802}] +[] +[] +[{"title": "Citation Query Engine", "text": "", "num_tokens": 922}] +[] +[] +[] +[{"title": "from llama_index import (", "text": "", "num_tokens": 836}] +[{"title": 
"Auto-Retrieval from a Vector Database", "text": "", "num_tokens": 807}] +[] +[{"title": "Zep Vector Store", "text": "", "num_tokens": 818}, {"title": "Zep Vector Store", "text": "", "num_tokens": 812}] +[{"title": "Typesense Vector Store", "text": "", "num_tokens": 802}] +[] +[{"title": "Cassandra Vector Store", "text": "", "num_tokens": 807}, {"title": "Cassandra Vector Store", "text": "", "num_tokens": 803}, {"title": "Cassandra Vector Store", "text": "", "num_tokens": 808}] +[] +[{"title": "Chroma", "text": "", "num_tokens": 803}, {"title": "Chroma", "text": "", "num_tokens": 817}] +[] +[{"title": "Postgres Vector Store", "text": "", "num_tokens": 817}] +[{"title": "DocArray InMemory Vector Store", "text": "", "num_tokens": 802}] +[] +[{"title": "Weaviate Vector Store", "text": "", "num_tokens": 804}] +[{"title": "LlamaIndex + Pinecone", "text": "", "num_tokens": 807}, {"title": "LlamaIndex + Pinecone", "text": "", "num_tokens": 816}, {"title": "LlamaIndex + Pinecone", "text": "", "num_tokens": 810}, {"title": "LlamaIndex + Pinecone", "text": "", "num_tokens": 809}, {"title": "LlamaIndex + Pinecone", "text": "", "num_tokens": 807}, {"title": "LlamaIndex + Pinecone", "text": "", "num_tokens": 803}, {"title": "LlamaIndex + Pinecone", "text": "", "num_tokens": 823}] +[] +[] +[{"title": "DeepLake Vector Store", "text": "", "num_tokens": 803}, {"title": "DeepLake Vector Store", "text": "", "num_tokens": 803}] +[{"title": "Redis Vector Store", "text": "", "num_tokens": 801}, {"title": "Redis Vector Store", "text": "", "num_tokens": 811}, {"title": "Redis Vector Store", "text": "", "num_tokens": 802}, {"title": "Redis Vector Store", "text": "", "num_tokens": 814}] +[] +[{"title": "Local Llama2 + VectorStoreIndex", "text": "", "num_tokens": 833}, {"title": "Local Llama2 + VectorStoreIndex", "text": "", "num_tokens": 852}, {"title": "Local Llama2 + VectorStoreIndex", "text": "", "num_tokens": 818}, {"title": "Local Llama2 + VectorStoreIndex", "text": "", "num_tokens": 807}, {"title": "Local Llama2 + VectorStoreIndex", "text": "", "num_tokens": 804}, {"title": "Local Llama2 + VectorStoreIndex", "text": " [Document(id_='e78be222-56c7-4bca-8257-ae2bf4c1c74b', embedding=None, metadata={}, excluded_embed_metadata_keys=[], excluded_llm_metadata_keys=[], relationships={}, hash='4c702b4df575421e1d1af4b1fd50511b226e0c9863dbfffeccb8b689b8448f35', text='\\t\\t\\n\\nWhat I Worked On\\n\\nFebruary 2021\\n\\nBefore college the two main things I worked on, outside of school, were writing and programming. I didn\\'t write essays. I wrote what beginning writers were supposed to write then, and probably still are: short stories. My stories were awful. They had hardly any plot, just characters with strong feelings, which I imagined made them deep.\\n\\nThe first programs I tried writing were on the IBM 1401 that our school district used for what was then called \"data processing.\" This was in 9th grade, so I was 13 or 14. The school district\\'s 1401 happened to be in the basement of our junior high school, and my friend Rich Draves and I got permission to use it. It was like a mini Bond villain\\'s lair down there, with all these alien-looking machines \u2014 CPU, disk drives, printer, card reader \u2014 sitting up on a raised floor under bright fluorescent lights.\\n\\nThe language we used was an early version of Fortran. You had to type programs on punch cards, then stack them in the card reader and press a button to load the program into memory and run it. 
The result would ordinarily be to print something on the spectacularly loud printer.\\n\\nI was puzzled by the 1401. I couldn\\'t figure out what to do with it. And in retrospect there\\'s not much I could have done with it. The only form of input to programs was data stored on punched cards, and I didn\\'t have any data stored on punched cards. The only other option was to do things that didn\\'t rely on any input, like calculate approximations of pi, but I didn\\'t know enough math to do anything interesting of that type. So I\\'m not surprised I can\\'t remember any programs I wrote, because they can\\'t have done much. My clearest memory is of the moment I learned it was possible for programs not to terminate, when one of mine didn\\'t. On a machine without time-sharing, this was a social as well as a technical error, as the data center manager\\'s expression made clear.\\n\\nWith microcomputers, everything changed. Now you could have a computer sitting right in front of you, on a desk, that could respond to your keystrokes as it was running instead of just churning through a stack of punch cards and then stopping. [1]\\n\\nThe first of my friends to get a microcomputer built it himself. It was sold as a kit by Heathkit. I remember vividly how impressed and envious I felt watching him sitting in front of it, typing programs right into the computer.\\n\\nComputers were expensive in those days and it took me years of nagging before I convinced my father to buy one, a TRS-80, in about 1980. The gold standard then was the Apple II, but a TRS-80 was good enough. This was when I really started programming. I wrote simple games, a program to predict how high my model rockets would fly, and a word processor that my father used to write at least one book. There was only room in memory for about 2 pages of text, so he\\'d write 2 pages at a time and t", "num_tokens": 168}] +[{"title": "Auto-Retrieval from a Vector Database", "text": "", "num_tokens": 809}, {"title": "Auto-Retrieval from a Vector Database", "text": "", "num_tokens": 829}] +[{"title": "Azure Cognitive Search", "text": "", "num_tokens": 804}, {"title": "Azure Cognitive Search", "text": "", "num_tokens": 801}] +[{"title": "BagelDB", "text": "", "num_tokens": 802}, {"title": "BagelDB", "text": "", "num_tokens": 802}] +[{"title": "Supabase Vector Store", "text": "", "num_tokens": 801}] +[{"title": "S3/R2 Storage", "text": "", "num_tokens": 810}] +[] +[{"title": "Qdrant Vector Store", "text": "", "num_tokens": 802}] +[{"title": "Simple Vector Store", "text": "", "num_tokens": 809}, {"title": "Simple Vector Store", "text": "", "num_tokens": 949}] +[{"title": "Simple Vector Store - Async Index Creation", "text": "", "num_tokens": 801}, {"title": "Simple Vector Store - Async Index Creation", "text": " Response(response=\"\\n\\nThe name 'Jakarta' is derived from the word Jayakarta (Devanagari: \u091c\u092f\u0915\u0930\u094d\u0924) which is ultimately derived from the Sanskrit \u091c\u092f jaya (victorious), and \u0915\u0943\u0924 krta (accomplished, acquired), thus Jayakarta translates as 'victorious deed', 'complete act' or 'complete victory'. It was named for the Muslim troops of Fatahillah which successfully defeated and drove the Portuguese away from the city in 1527. Before it was called Jayakarta, the city was known as 'Sunda Kelapa'. Tom\u00e9 Pires, a Portuguese apothecary wrote the name of the city on his magnum opus as Jacatra or Jacarta during his journey to East Indies. 
The city is located in a low-lying area ranging from \u22122 to 91 m (\u22127 to 299 ft) with an average elevation of 8 m (26 ft) above sea level with historically extensive swampy areas. Some parts of the city have been constructed on reclaimed tidal flats that occur around the area. Thirteen rivers flow through Jakarta, including the Ciliwung River, Kalibaru, Pesanggra\", source_nodes=[SourceNode(source_text=\"Jakarta (; Indonesian pronunciation: [d\u0292a\u02c8karta] (listen)), officially the Special Capital Region of Jakarta (Indonesian: Daerah Khusus Ibukota Jakarta), is the capital and largest city of Indonesia. Lying on the northwest coast of Java, the world's most populous island, Jakarta is the largest city in Southeast Asia and serves as the diplomatic capital of ASEAN.\\nThe city is the economic, cultural, and political centre of Indonesia. It possesses a province-level status and has a population of 10,562,088 as of mid-2021. Although Jakarta extends over only 664.01 km2 (256.38 sq mi) and thus has the smallest area of any Indonesian province, its metropolitan area covers 9,957.08 km2 (3,844.45 sq mi), which includes the satellite cities Bogor, Depok, Tangerang, South Tangerang, and Bekasi, and has an estimated population of 35 million as of 2021, making it the largest urban area in Indonesia and the second-largest in the world (after Tokyo). Jakarta ranks first among the Indonesian provinces in the human development index. Jakarta's business and employment opportunities, along with its ability to offer a potentially higher standard of living compared to other parts of the country, have attracted migrants from across the Indonesian archipelago, making it a melting pot of numerous cultures.\\nJakarta is one of the oldest continuously inhabited cities in Southeast Asia. Established in the fourth century as Sunda Kelapa, the city became an important trading port for the Sunda Kingdom. At one time, it was the de facto capital of the Dutch East Indies, when it was known as Batavia. Jakarta was officially a city within West Java until 1960 when its official status was changed to a province with special capital region distinction. As a province, its government consists of five administrative cities and one administrative regency. Jakarta is an alpha world city and is the seat of the ASEAN secretariat. Financial institutions such as the Bank of Indonesia, Indonesia Stock Exchange, and corporate headquarters of numerous Indonesian companies and multinational corporations are located in the city. 
In 2021, the city's GRP PPP was estimated at US", "num_tokens": 181}] +[] +[{"title": "Opensearch Vector Store", "text": "", "num_tokens": 851}] +[{"title": "Timescale Vector Store (PostgreSQL)", "text": "", "num_tokens": 811}, {"title": "Timescale Vector Store (PostgreSQL)", "text": "", "num_tokens": 809}, {"title": "Timescale Vector Store (PostgreSQL)", "text": "", "num_tokens": 806}, {"title": "Timescale Vector Store (PostgreSQL)", "text": "", "num_tokens": 806}, {"title": "Timescale Vector Store (PostgreSQL)", "text": " [-0.005366453900933266, 0.0016374519327655435, 0.005981510039418936, -0.026256779208779335, -0.03944991156458855, 0.026299940422177315, -0.0200558640062809, -0.01252412423491478, -0.04241368919610977, -0.004758591763675213, 0.05639812350273132, 0.006578581873327494, 0.014833281747996807, 0.009509989991784096, 0.0009675443288870156, -0.013157163746654987, -0.002265996066853404, -0.017048921436071396, 0.006553404498845339, -0.00217068032361567, 0.009085564874112606, 0.011775985360145569, -0.02514895796775818, -0.002679630182683468, 0.0030608929228037596, -3.439458305365406e-05, -0.00363818253390491, -0.03939236328005791, 0.0016806137282401323, -0.01207092497497797, 0.01739421673119068, -0.02241537719964981, -0.01753808930516243, -0.023782167583703995, -0.01598426327109337, -0.02575322426855564, -0.016876274719834328, -0.006380756851285696, -0.0009149408433586359, 0.00704616867005825, -0.0013290246715769172, -0.009776154533028603, -0.013200325891375542, -0.024832438677549362, -0.0019404839258641005, 0.027220726013183594, -0.004765785299241543, -0.008553235791623592, -0.023120352998375893, 0.006920279935002327, 0.017739512026309967, 0.0166892409324646, -0.019408436492085457, 0.010207772254943848, 0.01595548912882805, 0.004783769138157368, 0.008855368942022324, 0.018084805458784103, -0.012603254057466984, -0.002003428293392062, -0.0008407564600929618, 0.00394211383536458, -0.018948042765259743, 0.005722539033740759, -0.004244246520102024, -0.011502627283334732, -0.000936971337068826, 0.006873521022498608, -0.0038593867793679237, 0.0003349537728354335, 0.02490437589585781, 0.022861381992697716, -0.013833366334438324, 0.005657796282321215, 0.027896929532289505, -0.020415544509887695, -0.007143282797187567, 0.014862056821584702, -0.00667569600045681, -0.020199736580252647, 0.01827184110879898, -0.0030698850750923157, -0.032975636422634125, 0.02595464698970318, -0.0014818893978372216, -0.004906061105430126, 0.01008548028767109, 0.009337342344224453, -0.009833703748881817, -0.0011680669849738479, 0.010653777979314327, -0.0006110096583142877, 0.016228847205638885, -0.010589035227894783, 0.0010997274657711387, 0.020300446078181267, 0.005715345498174429, 0.009862477891147137, -0.0015664147213101387, -0.009207856841385365, -0.013480877503752708, -0.01759563945233822, 0.007992131635546684, -0.012639221735298634, -0.016833113506436348, -0.01654536835849285, 0.009366116486489773, 0.004229859448969364, -0.0044168937020003796, -0.00028122629737481475, -0.028918424621224403, 0.030616123229265213, -0.017020147293806076, -0.02500508539378643, 0.01844448782503605, 0.00011554780940059572, 0.021278781816363335, -0.01503470353782177, -0.024760503321886063, -0.02408429980278015, 0.03734936937689781, 0.000861438165884465, 0.021365106105804443, -0.006740438751876354, 0.005557085387408733, -0.017005760222673416, -0.01831500232219696, -0.01458150427788496, -0.0207896139472723, -0.004100373946130276, 0.011214882135391235, 0.03228504955768585, 0.00543119665235281, 0.02251608669757843, 
0.011373141780495644, 0.0207896139472723, 0.004032033961266279, 0.019768116995692253, -0.016329558566212654, -0.02755163423717022, -0.0001643296709517017, 0.04163677617907524, -0.02163846418", "num_tokens": 499}, {"title": "Timescale Vector Store (PostgreSQL)", "text": "", "num_tokens": 813}, {"title": "Timescale Vector Store (PostgreSQL)", "text": " VectorStoreQueryResult(nodes=[TextNode(id_='22747180-31f1-11ee-bd8e-101e36c28c91', embedding=None, metadata={'commit': ' 7aeed663b9c0f337b530fd6cad47704a51a9b2ec', 'author': 'Dmitry Simonenko', 'date': '2023-08-3 14:30:23+0500'}, excluded_embed_metadata_keys=[], excluded_llm_metadata_keys=[], relationships={}, hash='3273f20a98f02c75847896b929888b05e8751ae5e258d7feb8605bd5290ef8ca', text='Thu Aug 3 14:30:23 2023 +0300 Dmitry Simonenko Feature flags for TimescaleDB features This PR adds several GUCs which allow to enable/disable major timescaledb features: - enable_hypertable_create - enable_hypertable_compression - enable_cagg_create - enable_policy_create ', start_char_idx=None, end_char_idx=None, text_template='{metadata_str}\\n\\n{content}', metadata_template='{key}: {value}', metadata_seperator='\\n'), TextNode(id_='faa8ea00-4686-11ee-b933-c2c7df407c25', embedding=None, metadata={'commit': ' e4facda540286b0affba47ccc63959fefe2a7b26', 'author': 'Sven Klemm', 'date': '2023-08-29 18:13:24+0320'}, excluded_embed_metadata_keys=[], excluded_llm_metadata_keys=[], relationships={}, hash='6f45ab1cccf673ddf75c625983b6cf2f4a66bbf865a4c1c65025997a470f3bb3', text='Tue Aug 29 18:13:24 2023 +0200 Sven Klemm Add compatibility layer for _timescaledb_internal functions With timescaledb 2.12 all the functions present in _timescaledb_internal were moved into the _timescaledb_functions schema to improve schema security. This patch adds a compatibility layer so external callers of these internal functions will not break and allow for more flexibility when migrating. ', start_char_idx=None, end_char_idx=None, text_template='{metadata_str}\\n\\n{content}', metadata_template='{key}: {value}', metadata_seperator='\\n'), TextNode(id_='d7080180-40d2-11ee-af6f-f43e81a0925a', embedding=None, metadata={'commit': ' cf04496e4b4237440274eb25e4e02472fc4e06fc', 'author': 'Sven Klemm', 'date': '2023-08-22 12:01:19+0320'}, excluded_embed_metadata_keys=[], excluded_llm_metadata_keys=[], relationships={}, hash='d5a20dc83ae04f44aa901ba2f654e80ca68cb21f6a313bd91afcd91e404b471e', text='Tue Aug 22 12:01:19 2023 +0200 Sven Klemm Move utility functions to _timescaledb_functions schema To increase schema security we do not want to mix our own internal objects with user objects. Since chunks are created in the _timescaledb_internal schema our internal functions should live in a different dedicated schema. 
This patch make the necessary adjustments for the following functions: - generate_uuid() - get_git_commit() - get_os_info() - tsl_loaded() ', start_char_idx=None, end_char_idx=None, text_template='{metadata_str}\\n\\n{content}', metadata_template='{key}: {value}', metadata_seperator='\\n'), TextNode(id_='01b10780-4649-11ee-a375-5719b2881af3', embedding=None, metadata={'commit': ' a9751ccd5eb030026d7b975d22753f5964972389', 'author': 'Sven Klemm', 'date': '2023-08-29 10:49:47+0320'}, excluded_embed_metadata_keys=[], excluded_llm_metadata_keys=[], relationships={}, hash='8fde14d147def41808d82bf2ffa35e1e0ed78b0331962907cee856af34a34e44', text='Tue Aug 29 10:49:47 2023 +0200 Sven Klemm Move partitioning functions to _timescaledb_functions schema To increase schema security we do not want to mix", "num_tokens": 324}, {"title": "Timescale Vector Store (PostgreSQL)", "text": "", "num_tokens": 880}, {"title": "Timescale Vector Store (PostgreSQL)", "text": "", "num_tokens": 975}, {"title": "Timescale Vector Store (PostgreSQL)", "text": "", "num_tokens": 842}, {"title": "Timescale Vector Store (PostgreSQL)", "text": "", "num_tokens": 919}, {"title": "Timescale Vector Store (PostgreSQL)", "text": "", "num_tokens": 879}] +[] +[{"title": "Rockset Vector Store", "text": "", "num_tokens": 806}] +[{"title": "Elasticsearch Vector Store", "text": "", "num_tokens": 807}, {"title": "Elasticsearch Vector Store", "text": "", "num_tokens": 816}, {"title": "Elasticsearch Vector Store", "text": " custom query {'knn': {'filter': [{'match': {'content': 'growing up'}}], 'field': 'embedding', 'query_vector': [0.002520269714295864, -0.03282919153571129, 0.016138022765517235, -0.029537975788116455, -0.006744919344782829, 0.01626248098909855, -0.03703309968113899, 0.002381983445957303, -0.003031929489225149, -0.003616189584136009, 0.032746221870183945, 0.030201751738786697, 0.011726687662303448, 0.005043996497988701, 0.0030665011145174503, 0.016207166016101837, 0.018115518614649773, -0.008539185859262943, 0.020825933665037155, -0.011595315299928188, -0.027754081413149834, -0.004622223321348429, -0.004750138148665428, -0.015363619662821293, -0.006496003828942776, 0.012860636226832867, 0.02331508882343769, -0.009368903934955597, -0.002686213469132781, 0.0029818005859851837, 0.032441992312669754, 0.0015107790241017938, -0.0023059258237481117, 0.02384057641029358, -0.029233746230602264, 0.003574703587219119, 0.0048296526074409485, 0.019401581957936287, 0.01830912008881569, -0.009375818073749542, 0.037724532186985016, 0.026274416595697403, -0.016746483743190765, -0.005078568123281002, -0.02065998874604702, -0.012846807017922401, -0.002015524310991168, -0.01924946717917919, -0.0026568276807665825, 0.01626248098909855, -0.0002582066517788917, 0.027449851855635643, -0.011975603178143501, 0.013517496176064014, -0.005973972845822573, 0.002910928800702095, -0.00517536886036396, -0.004521965514868498, -0.012466519139707088, 0.0037890474777668715, 0.03454394266009331, 0.020729131996631622, 1.9514049199642614e-05, 0.010191707871854305, -0.0201068427413702, -0.0031131727155297995, 0.003581617958843708, -0.027270078659057617, 0.016151852905750275, 0.01658054068684578, 0.04679612070322037, -0.00013904266234021634, 0.01688477024435997, -0.00204491033218801, 0.014326471835374832, 0.0006266103009693325, -0.01454772986471653, -0.01425732858479023, -0.026039330288767815, 0.021296106278896332, 0.0022454254794865847, -0.03457160294055939, -0.028016826137900352, -0.009548676200211048, 0.0005151168443262577, -0.0019308238988742232, 
-0.00028759249835275114, 0.020203644409775734, -0.021890738978981972, 0.0035505034029483795, 0.04400273412466049, 0.038803163915872574, 0.021683309227228165, 0.02295554429292679, -0.03296747803688049, -0.007049149367958307, -0.012266004458069801, -0.009521018713712692, -0.013745669275522232, -0.004663709085434675, -0.01606888137757778, -0.0023162972647696733, -0.015944423153996468, -0.02537555620074272, -0.018945237621665, 0.0030181007459759712, 0.01265320647507906, 0.004712109453976154, 0.02267897129058838, -0.02790619619190693, -0.004788166843354702, 0.006188316736370325, -0.018170833587646484, -0.026302075013518333, -0.02126844972372055, -0.023785261437296867, 0.02508515492081642, 0.01951221190392971, -0.007896154187619686, -0.014098298735916615, 0.03213776275515556, -0.0026499133091419935, 0.01682945527136326, -0.007260036189109087, 0.017977232113480568, 0.00786849670112133, -0.027767909690737724, -0.009023187682032585, 0.010357651859521866, -0.0319441594183445, 0.013033493421971798, 0.01107674092054367, 0.022568341344594955, -0.015017903409898281, -0.027767909690737724, 0.02527875453233719, 0.0034174027387052774, 0.0026758420281112", "num_tokens": 548}] +[{"title": "Simple Vector Stores - Maximum Marginal Relevance Retrieval", "text": "", "num_tokens": 804}, {"title": "Simple Vector Stores - Maximum Marginal Relevance Retrieval", "text": "", "num_tokens": 828}, {"title": "Simple Vector Stores - Maximum Marginal Relevance Retrieval", "text": "", "num_tokens": 811}] +[{"title": "Pinecone Vector Store - Auto Retriever", "text": "", "num_tokens": 812}, {"title": "Pinecone Vector Store - Auto Retriever", "text": "", "num_tokens": 801}] +[{"title": "Milvus Vector Store", "text": "", "num_tokens": 811}] +[] +[] +[{"title": "Pinecone Vector Store", "text": "", "num_tokens": 815}] +[{"title": "Llama2 + VectorStoreIndex", "text": "", "num_tokens": 808}] +[{"title": "DocArray Hnsw Vector Store", "text": "", "num_tokens": 803}] +[{"title": "Epsilla Vector Store", "text": "", "num_tokens": 813}] +[{"title": "LanceDB Vector Store", "text": "", "num_tokens": 802}] +[{"title": "Guide: Using Vector Store Index with Existing Weaviate Vector Store", "text": "", "num_tokens": 804}] +[{"title": "Guide: Using Vector Store Index with Existing Pinecone Vector Store", "text": "", "num_tokens": 802}] +[{"title": "Vectara Vector Store", "text": "", "num_tokens": 808}] +[{"title": "Recursive Retriever + Node References", "text": "", "num_tokens": 801}, {"title": "Recursive Retriever + Node References", "text": "", "num_tokens": 809}, {"title": "Recursive Retriever + Node References", "text": "", "num_tokens": 802}] +[] +[{"title": "Reciprocal Rerank Fusion Retriever", "text": "", "num_tokens": 876}] +[{"title": "Simple Fusion Retriever", "text": "", "num_tokens": 807}] +[{"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 804}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 809}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 830}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 812}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 869}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. 
Recursive Retrieval)", "text": "", "num_tokens": 839}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 890}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 839}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 952}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 927}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 948}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 895}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 919}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 914}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 1042}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 805}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 1019}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 886}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 972}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 953}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 843}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 895}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 967}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 935}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 900}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 942}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 822}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 854}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 1021}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 808}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 866}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 830}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. 
Recursive Retrieval)", "text": "", "num_tokens": 880}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 1000}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 948}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 925}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 818}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 878}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 819}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 814}, {"title": "Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)", "text": "", "num_tokens": 922}] +[{"title": "Router Retriever", "text": "", "num_tokens": 812}, {"title": "Router Retriever", "text": "", "num_tokens": 802}, {"title": "Router Retriever", "text": "", "num_tokens": 805}, {"title": "Router Retriever", "text": "", "num_tokens": 816}] +[{"title": "Auto Merging Retriever", "text": "", "num_tokens": 813}, {"title": "Auto Merging Retriever", "text": "", "num_tokens": 802}, {"title": "Auto Merging Retriever", "text": "", "num_tokens": 806}, {"title": "Auto Merging Retriever", "text": "", "num_tokens": 831}, {"title": "Auto Merging Retriever", "text": "", "num_tokens": 804}] +[{"title": "BM25 Retriever", "text": "", "num_tokens": 812}, {"title": "BM25 Retriever", "text": "", "num_tokens": 807}, {"title": "BM25 Retriever", "text": "", "num_tokens": 803}] +[{"title": "Ensemble Query Engine Guide", "text": "", "num_tokens": 817}, {"title": "Ensemble Query Engine Guide", "text": "", "num_tokens": 810}, {"title": "Ensemble Query Engine Guide", "text": "", "num_tokens": 810}] +[{"title": "Recursive Retriever + Node References + Braintrust", "text": "", "num_tokens": 806}, {"title": "Recursive Retriever + Node References + Braintrust", "text": "", "num_tokens": 811}, {"title": "Recursive Retriever + Node References + Braintrust", "text": "", "num_tokens": 826}] +[{"title": "OnDemandLoaderTool Tutorial", "text": "", "num_tokens": 854}, {"title": "OnDemandLoaderTool Tutorial", "text": "", "num_tokens": 864}, {"title": "OnDemandLoaderTool Tutorial", "text": "", "num_tokens": 801}, {"title": "OnDemandLoaderTool Tutorial", "text": "", "num_tokens": 861}, {"title": "OnDemandLoaderTool Tutorial", "text": "", "num_tokens": 805}, {"title": "OnDemandLoaderTool Tutorial", "text": "", "num_tokens": 823}, {"title": "OnDemandLoaderTool Tutorial", "text": "", "num_tokens": 942}] +[] +[{"title": "Llama API", "text": "", "num_tokens": 815}] +[{"title": "LiteLLM", "text": "", "num_tokens": 817}, {"title": "LiteLLM", "text": "", "num_tokens": 818}] +[{"title": "Azure OpenAI", "text": "", "num_tokens": 875}] +[] +[{"title": "PaLM", "text": "", "num_tokens": 851}] +[{"title": "Replicate - Llama 2 13B", "text": "", "num_tokens": 808}] +[{"title": "LLM Predictor", "text": "", "num_tokens": 854}] +[{"title": "OpenAI", "text": "", "num_tokens": 810}] +[{"title": "Portkey", "text": "", "num_tokens": 809}, {"title": "Portkey", "text": "", "num_tokens": 807}, {"title": "Portkey", "text": "", "num_tokens": 806}, {"title": "Portkey", "text": "", 
"num_tokens": 811}] +[{"title": "Anthropic", "text": "", "num_tokens": 824}, {"title": "Anthropic", "text": "", "num_tokens": 805}] +[{"title": "Anyscale", "text": "", "num_tokens": 820}] +[{"title": "Konko", "text": "", "num_tokens": 805}, {"title": "Konko", "text": "", "num_tokens": 807}] +[] +[{"title": "Replicate - Vicuna 13B", "text": "", "num_tokens": 801}] +[{"title": "#Monster API LLM Integration into LLamaIndex", "text": "", "num_tokens": 801}] +[{"title": "LlamaCPP", "text": "", "num_tokens": 801}, {"title": "LlamaCPP", "text": "", "num_tokens": 812}, {"title": "LlamaCPP", "text": "", "num_tokens": 827}, {"title": "LlamaCPP", "text": "", "num_tokens": 835}] +[] +[] +[{"title": "Cohere", "text": "", "num_tokens": 824}, {"title": "Cohere", "text": "", "num_tokens": 801}, {"title": "Cohere", "text": "", "num_tokens": 814}] +[{"title": "Ollama - Llama 2 7B", "text": "", "num_tokens": 802}] +[{"title": "\ud83e\udd99 x \ud83e\udd99 Rap Battle", "text": "", "num_tokens": 840}, {"title": "\ud83e\udd99 x \ud83e\udd99 Rap Battle", "text": "", "num_tokens": 817}] +[{"title": "Xorbits Inference", "text": "", "num_tokens": 802}, {"title": "Xorbits Inference", "text": "", "num_tokens": 805}] +[] +[] +[{"title": "Local Embeddings with HuggingFace", "text": "", "num_tokens": 814}, {"title": "Local Embeddings with HuggingFace", "text": "", "num_tokens": 818}] +[] +[] +[] +[] +[] +[] +[] +[{"title": "Playground", "text": "", "num_tokens": 1069}] +[] +[] +[] +[{"title": "HTML Tag Reader", "text": "", "num_tokens": 810}] +[] +[] +[] +[] +[] +[] +[{"title": "MyScale Reader", "text": "", "num_tokens": 812}, {"title": "MyScale Reader", "text": " Document(text='\\t\\t\\n\\nWhat I Worked On\\n\\nFebruary 2021\\n\\nBefore college the two main things I worked on, outside of school, were writing and programming. I didn\\'t write essays. I wrote what beginning writers were supposed to write then, and probably still are: short stories. My stories were awful. They had hardly any plot, just characters with strong feelings, which I imagined made them deep.\\n\\nThe first programs I tried writing were on the IBM 1401 that our school district used for what was then called \"data processing.\" This was in 9th grade, so I was 13 or 14. The school district\\'s 1401 happened to be in the basement of our junior high school, and my friend Rich Draves and I got permission to use it. It was like a mini Bond villain\\'s lair down there, with all these alien-looking machines \u2014 CPU, disk drives, printer, card reader \u2014 sitting up on a raised floor under bright fluorescent lights.\\n\\nThe language we used was an early version of Fortran. You had to type programs on punch cards, then stack them in the card reader and press a button to load the program into memory and run it. The result would ordinarily be to print something on the spectacularly loud printer.\\n\\nI was puzzled by the 1401. I couldn\\'t figure out what to do with it. And in retrospect there\\'s not much I could have done with it. The only form of input to programs was data stored on punched cards, and I didn\\'t have any data stored on punched cards. The only other option was to do things that didn\\'t rely on any input, like calculate approximations of pi, but I didn\\'t know enough math to do anything interesting of that type. So I\\'m not surprised I can\\'t remember any programs I wrote, because they can\\'t have done much. My clearest memory is of the moment I learned it was possible for programs not to terminate, when one of mine didn\\'t. 
On a machine without time-sharing, this was a social as well as a technical error, as the data center manager\\'s expression made clear.\\n\\nWith microcomputers, everything changed. Now you could have a computer sitting right in front of you, on a desk, that could respond to your keystrokes as it was running instead of just churning through a stack of punch cards and then stopping. [1]\\n\\nThe first of my friends to get a microcomputer built it himself. It was sold as a kit by Heathkit. I remember vividly how impressed and envious I felt watching him sitting in front of it, typing programs right into the computer.\\n\\nComputers were expensive in those days and it took me years of nagging before I convinced my father to buy one, a TRS-80, in about 1980. The gold standard then was the Apple II, but a TRS-80 was good enough. This was when I really started programming. I wrote simple games, a program to predict how high my model rockets would fly, and a word processor that my father used to write at least one book. There was only room in memory for about 2 pages of text, so he\\'d write 2 pages at a time and then print them out, but it was a lot better than a typewriter.\\n\\nThough I liked programming, I didn\\'t plan to study it in college. In college I was going to study philosophy, which sounded much more powerful. It seemed, to my ", "num_tokens": 0}, {"title": "MyScale Reader", "text": " Document(text='write something and put it on the web, anyone can read it. That may seem obvious now, but it was surprising then. In the print era there was a narrow channel to readers, guarded by fierce monsters known as editors. The only way to get an audience for anything you wrote was to get it published as a book, or in a newspaper or magazine. Now anyone could publish anything.\\n\\nThis had been possible in principle since 1993, but not many people had realized it yet. I had been intimately involved with building the infrastructure of the web for most of that time, and a writer as well, and it had taken me 8 years to realize it. Even then it took me several years to understand the implications. It meant there would be a whole new generation of essays. [11]\\n\\nIn the print era, the channel for publishing essays had been vanishingly small. Except for a few officially anointed thinkers who went to the right parties in New York, the only people allowed to publish essays were specialists writing about their specialties. There were so many essays that had never been written, because there had been no way to publish them. Now they could be, and I was going to write them. [12]\\n\\nI\\'ve worked on several different things, but to the extent there was a turning point where I figured out what to work on, it was when I started publishing essays online. From then on I knew that whatever else I did, I\\'d always write essays too.\\n\\nI knew that online essays would be a marginal medium at first. Socially they\\'d seem more like rants posted by nutjobs on their GeoCities sites than the genteel and beautifully typeset compositions published in The New Yorker. But by this point I knew enough to find that encouraging instead of discouraging.\\n\\nOne of the most conspicuous patterns I\\'ve noticed in my life is how well it has worked, for me at least, to work on things that weren\\'t prestigious. Still life has always been the least prestigious form of painting. Viaweb and Y Combinator both seemed lame when we started them. 
I still get the glassy eye from strangers when they ask what I\\'m writing, and I explain that it\\'s an essay I\\'m going to publish on my web site. Even Lisp, though prestigious intellectually in something like the way Latin is, also seems about as hip.\\n\\nIt\\'s not that unprestigious types of work are good per se. But when you find yourself drawn to some kind of work despite its current lack of prestige, it\\'s a sign both that there\\'s something real to be discovered there, and that you have the right kind of motives. Impure motives are a big danger for the ambitious. If anything is going to lead you astray, it will be the desire to impress people. So while working on things that aren\\'t prestigious doesn\\'t guarantee you\\'re on the right track, it at least guarantees you\\'re not on the most common type of wrong one.\\n\\nOver the next several years I wrote lots of essays about all kinds of different topics. O\\'Reilly reprinted a collection of them as a book, called Hackers & Painters after one of the essays in it. I also worked on spam filters, and did some more painting. I used to have dinners for a group of friends every thursday night, wh", "num_tokens": 0}, {"title": "MyScale Reader", "text": " Document(text='YC to be good, so if how hard I worked set the upper bound on how hard everyone else worked, I\\'d better work very hard.\\n\\nOne day in 2010, when he was visiting California for interviews, Robert Morris did something astonishing: he offered me unsolicited advice. I can only remember him doing that once before. One day at Viaweb, when I was bent over double from a kidney stone, he suggested that it would be a good idea for him to take me to the hospital. That was what it took for Rtm to offer unsolicited advice. So I remember his exact words very clearly. \"You know,\" he said, \"you should make sure Y Combinator isn\\'t the last cool thing you do.\"\\n\\nAt the time I didn\\'t understand what he meant, but gradually it dawned on me that he was saying I should quit. This seemed strange advice, because YC was doing great. But if there was one thing rarer than Rtm offering advice, it was Rtm being wrong. So this set me thinking. It was true that on my current trajectory, YC would be the last thing I did, because it was only taking up more of my attention. It had already eaten Arc, and was in the process of eating essays too. Either YC was my life\\'s work or I\\'d have to leave eventually. And it wasn\\'t, so I would.\\n\\nIn the summer of 2012 my mother had a stroke, and the cause turned out to be a blood clot caused by colon cancer. The stroke destroyed her balance, and she was put in a nursing home, but she really wanted to get out of it and back to her house, and my sister and I were determined to help her do it. I used to fly up to Oregon to visit her regularly, and I had a lot of time to think on those flights. On one of them I realized I was ready to hand YC over to someone else.\\n\\nI asked Jessica if she wanted to be president, but she didn\\'t, so we decided we\\'d try to recruit Sam Altman. We talked to Robert and Trevor and we agreed to make it a complete changing of the guard. Up till that point YC had been controlled by the original LLC we four had started. But we wanted YC to last for a long time, and to do that it couldn\\'t be controlled by the founders. So if Sam said yes, we\\'d let him reorganize YC. 
Robert and I would retire, and Jessica and Trevor would become ordinary partners.\\n\\nWhen we asked Sam if he wanted to be president of YC, initially he said no. He wanted to start a startup to make nuclear reactors. But I kept at it, and in October 2013 he finally agreed. We decided he\\'d take over starting with the winter 2014 batch. For the rest of 2013 I left running YC more and more to Sam, partly so he could learn the job, and partly because I was focused on my mother, whose cancer had returned.\\n\\nShe died on January 15, 2014. We knew this was coming, but it was still hard when it did.\\n\\nI kept working on YC till March, to help get that batch of startups through Demo Day, then I checked out pretty completely. (I still talk to alumni and to new startups working on things I\\'m interested in, but that only takes a few hours a week.)\\n\\nWhat should I do next? Rtm\\'s advice hadn\\'t included anything about that. I wanted to do something completely different, so I decided I\\'d paint. I wanted to see how good I could get if I rea", "num_tokens": 0}, {"title": "MyScale Reader", "text": " Document(text='funding to live on.\\n\\nWe originally hoped to launch in September, but we got more ambitious about the software as we worked on it. Eventually we managed to build a WYSIWYG site builder, in the sense that as you were creating pages, they looked exactly like the static ones that would be generated later, except that instead of leading to static pages, the links all referred to closures stored in a hash table on the server.\\n\\nIt helped to have studied art, because the main goal of an online store builder is to make users look legit, and the key to looking legit is high production values. If you get page layouts and fonts and colors right, you can make a guy running a store out of his bedroom look more legit than a big company.\\n\\n(If you\\'re curious why my site looks so old-fashioned, it\\'s because it\\'s still made with this software. It may look clunky today, but in 1996 it was the last word in slick.)\\n\\nIn September, Robert rebelled. \"We\\'ve been working on this for a month,\" he said, \"and it\\'s still not done.\" This is funny in retrospect, because he would still be working on it almost 3 years later. But I decided it might be prudent to recruit more programmers, and I asked Robert who else in grad school with him was really good. He recommended Trevor Blackwell, which surprised me at first, because at that point I knew Trevor mainly for his plan to reduce everything in his life to a stack of notecards, which he carried around with him. But Rtm was right, as usual. Trevor turned out to be a frighteningly effective hacker.\\n\\nIt was a lot of fun working with Robert and Trevor. They\\'re the two most independent-minded people I know, and in completely different ways. If you could see inside Rtm\\'s brain it would look like a colonial New England church, and if you could see inside Trevor\\'s it would look like the worst excesses of Austrian Rococo.\\n\\nWe opened for business, with 6 stores, in January 1996. It was just as well we waited a few months, because although we worried we were late, we were actually almost fatally early. There was a lot of talk in the press then about ecommerce, but not many people actually wanted online stores. 
[8]\\n\\nThere were three main parts to the software: the editor, which people used to build sites and which I wrote, the shopping cart, which Robert wrote, and the manager, which kept track of orders and statistics, and which Trevor wrote. In its time, the editor was one of the best general-purpose site builders. I kept the code tight and didn\\'t have to integrate with any other software except Robert\\'s and Trevor\\'s, so it was quite fun to work on. If all I\\'d had to do was work on this software, the next 3 years would have been the easiest of my life. Unfortunately I had to do a lot more, all of it stuff I was worse at than programming, and the next 3 years were instead the most stressful.\\n\\nThere were a lot of startups making ecommerce software in the second half of the 90s. We were determined to be the Microsoft Word, not the Interleaf. Which meant being easy to use and inexpensive. It was lucky for us that we were poor, because that caused us to make Viaweb even more inexpensive than we r", "num_tokens": 0}, {"title": "MyScale Reader", "text": " Document(text='a nude model sitting as close to it as possible without getting burned. Except hardly anyone else painted her besides me. The rest of the students spent their time chatting or occasionally trying to imitate things they\\'d seen in American art magazines.\\n\\nOur model turned out to live just down the street from me. She made a living from a combination of modelling and making fakes for a local antique dealer. She\\'d copy an obscure old painting out of a book, and then he\\'d take the copy and maltreat it to make it look old. [3]\\n\\nWhile I was a student at the Accademia I started painting still lives in my bedroom at night. These paintings were tiny, because the room was, and because I painted them on leftover scraps of canvas, which was all I could afford at the time. Painting still lives is different from painting people, because the subject, as its name suggests, can\\'t move. People can\\'t sit for more than about 15 minutes at a time, and when they do they don\\'t sit very still. So the traditional m.o. for painting people is to know how to paint a generic person, which you then modify to match the specific person you\\'re painting. Whereas a still life you can, if you want, copy pixel by pixel from what you\\'re seeing. You don\\'t want to stop there, of course, or you get merely photographic accuracy, and what makes a still life interesting is that it\\'s been through a head. You want to emphasize the visual cues that tell you, for example, that the reason the color changes suddenly at a certain point is that it\\'s the edge of an object. By subtly emphasizing such things you can make paintings that are more realistic than photographs not just in some metaphorical sense, but in the strict information-theoretic sense. [4]\\n\\nI liked painting still lives because I was curious about what I was seeing. In everyday life, we aren\\'t consciously aware of much we\\'re seeing. Most visual perception is handled by low-level processes that merely tell your brain \"that\\'s a water droplet\" without telling you details like where the lightest and darkest points are, or \"that\\'s a bush\" without telling you the shape and position of every leaf. This is a feature of brains, not a bug. In everyday life it would be distracting to notice every leaf on every bush. But when you have to paint something, you have to look more closely, and when you do there\\'s a lot to see. 
You can still be noticing new things after days of trying to paint something people usually take for granted, just as you can after days of trying to write an essay about something people usually take for granted.\\n\\nThis is not the only way to paint. I\\'m not 100% sure it\\'s even a good way to paint. But it seemed a good enough bet to be worth trying.\\n\\nOur teacher, professor Ulivi, was a nice guy. He could see I worked hard, and gave me a good grade, which he wrote down in a sort of passport each student had. But the Accademia wasn\\'t teaching me anything except Italian, and my money was running out, so at the end of the first year I went back to the US.\\n\\nI wanted to go back to RISD, but I was now broke and RISD was very expensive, so I decided to get a job for a year and then re", "num_tokens": 0}, {"title": "MyScale Reader", "text": " [Document(text='funding to live on.\\n\\nWe originally hoped to launch in September, but we got more ambitious about the software as we worked on it. Eventually we managed to build a WYSIWYG site builder, in the sense that as you were creating pages, they looked exactly like the static ones that would be generated later, except that instead of leading to static pages, the links all referred to closures stored in a hash table on the server.\\n\\nIt helped to have studied art, because the main goal of an online store builder is to make users look legit, and the key to looking legit is high production values. If you get page layouts and fonts and colors right, you can make a guy running a store out of his bedroom look more legit than a big company.\\n\\n(If you\\'re curious why my site looks so old-fashioned, it\\'s because it\\'s still made with this software. It may look clunky today, but in 1996 it was the last word in slick.)\\n\\nIn September, Robert rebelled. \"We\\'ve been working on this for a month,\" he said, \"and it\\'s still not done.\" This is funny in retrospect, because he would still be working on it almost 3 years later. But I decided it might be prudent to recruit more programmers, and I asked Robert who else in grad school with him was really good. He recommended Trevor Blackwell, which surprised me at first, because at that point I knew Trevor mainly for his plan to reduce everything in his life to a stack of notecards, which he carried around with him. But Rtm was right, as usual. Trevor turned out to be a frighteningly effective hacker.\\n\\nIt was a lot of fun working with Robert and Trevor. They\\'re the two most independent-minded people I know, and in completely different ways. If you could see inside Rtm\\'s brain it would look like a colonial New England church, and if you could see inside Trevor\\'s it would look like the worst excesses of Austrian Rococo.\\n\\nWe opened for business, with 6 stores, in January 1996. It was just as well we waited a few months, because although we worried we were late, we were actually almost fatally early. There was a lot of talk in the press then about ecommerce, but not many people actually wanted online stores. [8]\\n\\nThere were three main parts to the software: the editor, which people used to build sites and which I wrote, the shopping cart, which Robert wrote, and the manager, which kept track of orders and statistics, and which Trevor wrote. In its time, the editor was one of the best general-purpose site builders. I kept the code tight and didn\\'t have to integrate with any other software except Robert\\'s and Trevor\\'s, so it was quite fun to work on. 
If all I\\'d had to do was work on this software, the next 3 years would have been the easiest of my life. Unfortunately I had to do a lot more, all of it stuff I was worse at than programming, and the next 3 years were instead the most stressful.\\n\\nThere were a lot of startups making ecommerce software in the second half of the 90s. We were determined to be the Microsoft Word, not the Interleaf. Which meant being easy to use and inexpensive. It was lucky for us that we were poor, because that caused us to make Viaweb even more inexpensive than we r", "num_tokens": 34}, {"title": "MyScale Reader", "text": " Document(text='write something and put it on the web, anyone can read it. That may seem obvious now, but it was surprising then. In the print era there was a narrow channel to readers, guarded by fierce monsters known as editors. The only way to get an audience for anything you wrote was to get it published as a book, or in a newspaper or magazine. Now anyone could publish anything.\\n\\nThis had been possible in principle since 1993, but not many people had realized it yet. I had been intimately involved with building the infrastructure of the web for most of that time, and a writer as well, and it had taken me 8 years to realize it. Even then it took me several years to understand the implications. It meant there would be a whole new generation of essays. [11]\\n\\nIn the print era, the channel for publishing essays had been vanishingly small. Except for a few officially anointed thinkers who went to the right parties in New York, the only people allowed to publish essays were specialists writing about their specialties. There were so many essays that had never been written, because there had been no way to publish them. Now they could be, and I was going to write them. [12]\\n\\nI\\'ve worked on several different things, but to the extent there was a turning point where I figured out what to work on, it was when I started publishing essays online. From then on I knew that whatever else I did, I\\'d always write essays too.\\n\\nI knew that online essays would be a marginal medium at first. Socially they\\'d seem more like rants posted by nutjobs on their GeoCities sites than the genteel and beautifully typeset compositions published in The New Yorker. But by this point I knew enough to find that encouraging instead of discouraging.\\n\\nOne of the most conspicuous patterns I\\'ve noticed in my life is how well it has worked, for me at least, to work on things that weren\\'t prestigious. Still life has always been the least prestigious form of painting. Viaweb and Y Combinator both seemed lame when we started them. I still get the glassy eye from strangers when they ask what I\\'m writing, and I explain that it\\'s an essay I\\'m going to publish on my web site. Even Lisp, though prestigious intellectually in something like the way Latin is, also seems about as hip.\\n\\nIt\\'s not that unprestigious types of work are good per se. But when you find yourself drawn to some kind of work despite its current lack of prestige, it\\'s a sign both that there\\'s something real to be discovered there, and that you have the right kind of motives. Impure motives are a big danger for the ambitious. If anything is going to lead you astray, it will be the desire to impress people. 
So while working on things that aren\\'t prestigious doesn\\'t guarantee you\\'re on the right track, it at least guarantees you\\'re not on the most common type of wrong one.\\n\\nOver the next several years I wrote lots of essays about all kinds of different topics. O\\'Reilly reprinted a collection of them as a book, called Hackers & Painters after one of the essays in it. I also worked on spam filters, and did some more painting. I used to have dinners for a group of friends every thursday night, wh", "num_tokens": 34}, {"title": "MyScale Reader", "text": " Document(text='YC to be good, so if how hard I worked set the upper bound on how hard everyone else worked, I\\'d better work very hard.\\n\\nOne day in 2010, when he was visiting California for interviews, Robert Morris did something astonishing: he offered me unsolicited advice. I can only remember him doing that once before. One day at Viaweb, when I was bent over double from a kidney stone, he suggested that it would be a good idea for him to take me to the hospital. That was what it took for Rtm to offer unsolicited advice. So I remember his exact words very clearly. \"You know,\" he said, \"you should make sure Y Combinator isn\\'t the last cool thing you do.\"\\n\\nAt the time I didn\\'t understand what he meant, but gradually it dawned on me that he was saying I should quit. This seemed strange advice, because YC was doing great. But if there was one thing rarer than Rtm offering advice, it was Rtm being wrong. So this set me thinking. It was true that on my current trajectory, YC would be the last thing I did, because it was only taking up more of my attention. It had already eaten Arc, and was in the process of eating essays too. Either YC was my life\\'s work or I\\'d have to leave eventually. And it wasn\\'t, so I would.\\n\\nIn the summer of 2012 my mother had a stroke, and the cause turned out to be a blood clot caused by colon cancer. The stroke destroyed her balance, and she was put in a nursing home, but she really wanted to get out of it and back to her house, and my sister and I were determined to help her do it. I used to fly up to Oregon to visit her regularly, and I had a lot of time to think on those flights. On one of them I realized I was ready to hand YC over to someone else.\\n\\nI asked Jessica if she wanted to be president, but she didn\\'t, so we decided we\\'d try to recruit Sam Altman. We talked to Robert and Trevor and we agreed to make it a complete changing of the guard. Up till that point YC had been controlled by the original LLC we four had started. But we wanted YC to last for a long time, and to do that it couldn\\'t be controlled by the founders. So if Sam said yes, we\\'d let him reorganize YC. Robert and I would retire, and Jessica and Trevor would become ordinary partners.\\n\\nWhen we asked Sam if he wanted to be president of YC, initially he said no. He wanted to start a startup to make nuclear reactors. But I kept at it, and in October 2013 he finally agreed. We decided he\\'d take over starting with the winter 2014 batch. For the rest of 2013 I left running YC more and more to Sam, partly so he could learn the job, and partly because I was focused on my mother, whose cancer had returned.\\n\\nShe died on January 15, 2014. We knew this was coming, but it was still hard when it did.\\n\\nI kept working on YC till March, to help get that batch of startups through Demo Day, then I checked out pretty completely. 
(I still talk to alumni and to new startups working on things I\\'m interested in, but that only takes a few hours a week.)\\n\\nWhat should I do next? Rtm\\'s advice hadn\\'t included anything about that. I wanted to do something completely different, so I decided I\\'d paint. I wanted to see how good I could get if I rea", "num_tokens": 34}] +[] +[{"title": "MilvusReader", "text": " [Document(text='YC to be good, so if how hard I worked set the upper bound on how hard everyone else worked, I\\'d better work very hard.\\n\\nOne day in 2010, when he was visiting California for interviews, Robert Morris did something astonishing: he offered me unsolicited advice. I can only remember him doing that once before. One day at Viaweb, when I was bent over double from a kidney stone, he suggested that it would be a good idea for him to take me to the hospital. That was what it took for Rtm to offer unsolicited advice. So I remember his exact words very clearly. \"You know,\" he said, \"you should make sure Y Combinator isn\\'t the last cool thing you do.\"\\n\\nAt the time I didn\\'t understand what he meant, but gradually it dawned on me that he was saying I should quit. This seemed strange advice, because YC was doing great. But if there was one thing rarer than Rtm offering advice, it was Rtm being wrong. So this set me thinking. It was true that on my current trajectory, YC would be the last thing I did, because it was only taking up more of my attention. It had already eaten Arc, and was in the process of eating essays too. Either YC was my life\\'s work or I\\'d have to leave eventually. And it wasn\\'t, so I would.\\n\\nIn the summer of 2012 my mother had a stroke, and the cause turned out to be a blood clot caused by colon cancer. The stroke destroyed her balance, and she was put in a nursing home, but she really wanted to get out of it and back to her house, and my sister and I were determined to help her do it. I used to fly up to Oregon to visit her regularly, and I had a lot of time to think on those flights. On one of them I realized I was ready to hand YC over to someone else.\\n\\nI asked Jessica if she wanted to be president, but she didn\\'t, so we decided we\\'d try to recruit Sam Altman. We talked to Robert and Trevor and we agreed to make it a complete changing of the guard. Up till that point YC had been controlled by the original LLC we four had started. But we wanted YC to last for a long time, and to do that it couldn\\'t be controlled by the founders. So if Sam said yes, we\\'d let him reorganize YC. Robert and I would retire, and Jessica and Trevor would become ordinary partners.\\n\\nWhen we asked Sam if he wanted to be president of YC, initially he said no. He wanted to start a startup to make nuclear reactors. But I kept at it, and in October 2013 he finally agreed. We decided he\\'d take over starting with the winter 2014 batch. For the rest of 2013 I left running YC more and more to Sam, partly so he could learn the job, and partly because I was focused on my mother, whose cancer had returned.\\n\\nShe died on January 15, 2014. We knew this was coming, but it was still hard when it did.\\n\\nI kept working on YC till March, to help get that batch of startups through Demo Day, then I checked out pretty completely. (I still talk to alumni and to new startups working on things I\\'m interested in, but that only takes a few hours a week.)\\n\\nWhat should I do next? Rtm\\'s advice hadn\\'t included anything about that. 
I wanted to do something completely different, so I decided I\\'d paint. I wanted to see how good I could get if I rea", "num_tokens": 207}, {"title": "MilvusReader", "text": " Document(text='\\t\\t\\n\\nWhat I Worked On\\n\\nFebruary 2021\\n\\nBefore college the two main things I worked on, outside of school, were writing and programming. I didn\\'t write essays. I wrote what beginning writers were supposed to write then, and probably still are: short stories. My stories were awful. They had hardly any plot, just characters with strong feelings, which I imagined made them deep.\\n\\nThe first programs I tried writing were on the IBM 1401 that our school district used for what was then called \"data processing.\" This was in 9th grade, so I was 13 or 14. The school district\\'s 1401 happened to be in the basement of our junior high school, and my friend Rich Draves and I got permission to use it. It was like a mini Bond villain\\'s lair down there, with all these alien-looking machines \u2014 CPU, disk drives, printer, card reader \u2014 sitting up on a raised floor under bright fluorescent lights.\\n\\nThe language we used was an early version of Fortran. You had to type programs on punch cards, then stack them in the card reader and press a button to load the program into memory and run it. The result would ordinarily be to print something on the spectacularly loud printer.\\n\\nI was puzzled by the 1401. I couldn\\'t figure out what to do with it. And in retrospect there\\'s not much I could have done with it. The only form of input to programs was data stored on punched cards, and I didn\\'t have any data stored on punched cards. The only other option was to do things that didn\\'t rely on any input, like calculate approximations of pi, but I didn\\'t know enough math to do anything interesting of that type. So I\\'m not surprised I can\\'t remember any programs I wrote, because they can\\'t have done much. My clearest memory is of the moment I learned it was possible for programs not to terminate, when one of mine didn\\'t. On a machine without time-sharing, this was a social as well as a technical error, as the data center manager\\'s expression made clear.\\n\\nWith microcomputers, everything changed. Now you could have a computer sitting right in front of you, on a desk, that could respond to your keystrokes as it was running instead of just churning through a stack of punch cards and then stopping. [1]\\n\\nThe first of my friends to get a microcomputer built it himself. It was sold as a kit by Heathkit. I remember vividly how impressed and envious I felt watching him sitting in front of it, typing programs right into the computer.\\n\\nComputers were expensive in those days and it took me years of nagging before I convinced my father to buy one, a TRS-80, in about 1980. The gold standard then was the Apple II, but a TRS-80 was good enough. This was when I really started programming. I wrote simple games, a program to predict how high my model rockets would fly, and a word processor that my father used to write at least one book. There was only room in memory for about 2 pages of text, so he\\'d write 2 pages at a time and then print them out, but it was a lot better than a typewriter.\\n\\nThough I liked programming, I didn\\'t plan to study it in college. In college I was going to study philosophy, which sounded much more powerful. 
It seemed, to my ", "num_tokens": 207}, {"title": "MilvusReader", "text": "", "num_tokens": 877}, {"title": "MilvusReader", "text": " Document(text='a nude model sitting as close to it as possible without getting burned. Except hardly anyone else painted her besides me. The rest of the students spent their time chatting or occasionally trying to imitate things they\\'d seen in American art magazines.\\n\\nOur model turned out to live just down the street from me. She made a living from a combination of modelling and making fakes for a local antique dealer. She\\'d copy an obscure old painting out of a book, and then he\\'d take the copy and maltreat it to make it look old. [3]\\n\\nWhile I was a student at the Accademia I started painting still lives in my bedroom at night. These paintings were tiny, because the room was, and because I painted them on leftover scraps of canvas, which was all I could afford at the time. Painting still lives is different from painting people, because the subject, as its name suggests, can\\'t move. People can\\'t sit for more than about 15 minutes at a time, and when they do they don\\'t sit very still. So the traditional m.o. for painting people is to know how to paint a generic person, which you then modify to match the specific person you\\'re painting. Whereas a still life you can, if you want, copy pixel by pixel from what you\\'re seeing. You don\\'t want to stop there, of course, or you get merely photographic accuracy, and what makes a still life interesting is that it\\'s been through a head. You want to emphasize the visual cues that tell you, for example, that the reason the color changes suddenly at a certain point is that it\\'s the edge of an object. By subtly emphasizing such things you can make paintings that are more realistic than photographs not just in some metaphorical sense, but in the strict information-theoretic sense. [4]\\n\\nI liked painting still lives because I was curious about what I was seeing. In everyday life, we aren\\'t consciously aware of much we\\'re seeing. Most visual perception is handled by low-level processes that merely tell your brain \"that\\'s a water droplet\" without telling you details like where the lightest and darkest points are, or \"that\\'s a bush\" without telling you the shape and position of every leaf. This is a feature of brains, not a bug. In everyday life it would be distracting to notice every leaf on every bush. But when you have to paint something, you have to look more closely, and when you do there\\'s a lot to see. You can still be noticing new things after days of trying to paint something people usually take for granted, just as you can after days of trying to write an essay about something people usually take for granted.\\n\\nThis is not the only way to paint. I\\'m not 100% sure it\\'s even a good way to paint. But it seemed a good enough bet to be worth trying.\\n\\nOur teacher, professor Ulivi, was a nice guy. He could see I worked hard, and gave me a good grade, which he wrote down in a sort of passport each student had. But the Accademia wasn\\'t teaching me anything except Italian, and my money was running out, so at the end of the first year I went back to the US.\\n\\nI wanted to go back to RISD, but I was now broke and RISD was very expensive, so I decided to get a job for a year and then re", "num_tokens": 0}, {"title": "MilvusReader", "text": " Document(text='write something and put it on the web, anyone can read it. That may seem obvious now, but it was surprising then. 
In the print era there was a narrow channel to readers, guarded by fierce monsters known as editors. The only way to get an audience for anything you wrote was to get it published as a book, or in a newspaper or magazine. Now anyone could publish anything.\\n\\nThis had been possible in principle since 1993, but not many people had realized it yet. I had been intimately involved with building the infrastructure of the web for most of that time, and a writer as well, and it had taken me 8 years to realize it. Even then it took me several years to understand the implications. It meant there would be a whole new generation of essays. [11]\\n\\nIn the print era, the channel for publishing essays had been vanishingly small. Except for a few officially anointed thinkers who went to the right parties in New York, the only people allowed to publish essays were specialists writing about their specialties. There were so many essays that had never been written, because there had been no way to publish them. Now they could be, and I was going to write them. [12]\\n\\nI\\'ve worked on several different things, but to the extent there was a turning point where I figured out what to work on, it was when I started publishing essays online. From then on I knew that whatever else I did, I\\'d always write essays too.\\n\\nI knew that online essays would be a marginal medium at first. Socially they\\'d seem more like rants posted by nutjobs on their GeoCities sites than the genteel and beautifully typeset compositions published in The New Yorker. But by this point I knew enough to find that encouraging instead of discouraging.\\n\\nOne of the most conspicuous patterns I\\'ve noticed in my life is how well it has worked, for me at least, to work on things that weren\\'t prestigious. Still life has always been the least prestigious form of painting. Viaweb and Y Combinator both seemed lame when we started them. I still get the glassy eye from strangers when they ask what I\\'m writing, and I explain that it\\'s an essay I\\'m going to publish on my web site. Even Lisp, though prestigious intellectually in something like the way Latin is, also seems about as hip.\\n\\nIt\\'s not that unprestigious types of work are good per se. But when you find yourself drawn to some kind of work despite its current lack of prestige, it\\'s a sign both that there\\'s something real to be discovered there, and that you have the right kind of motives. Impure motives are a big danger for the ambitious. If anything is going to lead you astray, it will be the desire to impress people. So while working on things that aren\\'t prestigious doesn\\'t guarantee you\\'re on the right track, it at least guarantees you\\'re not on the most common type of wrong one.\\n\\nOver the next several years I wrote lots of essays about all kinds of different topics. O\\'Reilly reprinted a collection of them as a book, called Hackers & Painters after one of the essays in it. I also worked on spam filters, and did some more painting. I used to have dinners for a group of friends every thursday night, wh", "num_tokens": 0}, {"title": "MilvusReader", "text": " Document(text='funding to live on.\\n\\nWe originally hoped to launch in September, but we got more ambitious about the software as we worked on it. 
Eventually we managed to build a WYSIWYG site builder, in the sense that as you were creating pages, they looked exactly like the static ones that would be generated later, except that instead of leading to static pages, the links all referred to closures stored in a hash table on the server.\\n\\nIt helped to have studied art, because the main goal of an online store builder is to make users look legit, and the key to looking legit is high production values. If you get page layouts and fonts and colors right, you can make a guy running a store out of his bedroom look more legit than a big company.\\n\\n(If you\\'re curious why my site looks so old-fashioned, it\\'s because it\\'s still made with this software. It may look clunky today, but in 1996 it was the last word in slick.)\\n\\nIn September, Robert rebelled. \"We\\'ve been working on this for a month,\" he said, \"and it\\'s still not done.\" This is funny in retrospect, because he would still be working on it almost 3 years later. But I decided it might be prudent to recruit more programmers, and I asked Robert who else in grad school with him was really good. He recommended Trevor Blackwell, which surprised me at first, because at that point I knew Trevor mainly for his plan to reduce everything in his life to a stack of notecards, which he carried around with him. But Rtm was right, as usual. Trevor turned out to be a frighteningly effective hacker.\\n\\nIt was a lot of fun working with Robert and Trevor. They\\'re the two most independent-minded people I know, and in completely different ways. If you could see inside Rtm\\'s brain it would look like a colonial New England church, and if you could see inside Trevor\\'s it would look like the worst excesses of Austrian Rococo.\\n\\nWe opened for business, with 6 stores, in January 1996. It was just as well we waited a few months, because although we worried we were late, we were actually almost fatally early. There was a lot of talk in the press then about ecommerce, but not many people actually wanted online stores. [8]\\n\\nThere were three main parts to the software: the editor, which people used to build sites and which I wrote, the shopping cart, which Robert wrote, and the manager, which kept track of orders and statistics, and which Trevor wrote. In its time, the editor was one of the best general-purpose site builders. I kept the code tight and didn\\'t have to integrate with any other software except Robert\\'s and Trevor\\'s, so it was quite fun to work on. If all I\\'d had to do was work on this software, the next 3 years would have been the easiest of my life. Unfortunately I had to do a lot more, all of it stuff I was worse at than programming, and the next 3 years were instead the most stressful.\\n\\nThere were a lot of startups making ecommerce software in the second half of the 90s. We were determined to be the Microsoft Word, not the Interleaf. Which meant being easy to use and inexpensive. 
It was lucky for us that we were poor, because that caused us to make Viaweb even more inexpensive than we r", "num_tokens": 0}] +[] +[] +[] +[] +[] +[] +[] +[] +[] +[] +[] +[] +[{"title": "Streaming for Chat Engine - Condense Question Mode", "text": "", "num_tokens": 855}] +[] +[] +[] +[{"title": "HuggingFace LLM - StableLM", "text": "", "num_tokens": 814}, {"title": "HuggingFace LLM - StableLM", "text": "", "num_tokens": 805}] +[{"title": "Azure OpenAI", "text": "", "num_tokens": 807}, {"title": "Azure OpenAI", "text": "", "num_tokens": 846}] +[{"title": "HuggingFace LLM - Camel-5b", "text": "", "num_tokens": 825}] +[{"title": "ChatGPT", "text": "", "num_tokens": 806}] +[{"title": "Automated Metadata Extraction for Better Retrieval + Synthesis", "text": "", "num_tokens": 812}, {"title": "Automated Metadata Extraction for Better Retrieval + Synthesis", "text": "", "num_tokens": 834}, {"title": "Automated Metadata Extraction for Better Retrieval + Synthesis", "text": "", "num_tokens": 817}, {"title": "Automated Metadata Extraction for Better Retrieval + Synthesis", "text": "", "num_tokens": 815}] +[{"title": "Pydantic Extractor", "text": "", "num_tokens": 802}] +[{"title": "Entity Metadata Extraction", "text": "", "num_tokens": 802}] +[{"title": "Metadata Extraction and Augmentation w/ Marvin", "text": "", "num_tokens": 807}] +[{"title": "Extracting Metadata for Better Document Indexing and Understanding", "text": "", "num_tokens": 806}, {"title": "Extracting Metadata for Better Document Indexing and Understanding", "text": "", "num_tokens": 812}, {"title": "Extracting Metadata for Better Document Indexing and Understanding", "text": "", "num_tokens": 814}, {"title": "Extracting Metadata for Better Document Indexing and Understanding", "text": "", "num_tokens": 808}] +[{"title": "Llama Debug Handler", "text": " CBEvent(event_type=, payload={: [ChatMessage(role=, content=\"You are an expert Q&A system that is trusted around the world.\\nAlways answer the query using the provided context information, and not prior knowledge.\\nSome rules to follow:\\n1. Never directly reference the given context in your answer.\\n2. Avoid statements like 'Based on the context, ...' or 'The context information ...' 
or anything along those lines.\", additional_kwargs={}), ChatMessage(role=, content='Context information is below.\\n---------------------\\nWhat I Worked On\\n\\nFebruary 2021\\n\\nBefore college the two main things I worked on, outside of school, were writing and programming. I didn\\'t write essays. I wrote what beginning writers were supposed to write then, and probably still are: short stories. My stories were awful. They had hardly any plot, just characters with strong feelings, which I imagined made them deep. The first programs I tried writing were on the IBM 1401 that our school district used for what was then called \"data processing.\" This was in 9th grade, so I was 13 or 14. The school district\\'s 1401 happened to be in the basement of our junior high school, and my friend Rich Draves and I got permission to use it. It was like a mini Bond villain\\'s lair down there, with all these alien-looking machines \u2014 CPU, disk drives, printer, card reader \u2014 sitting up on a raised floor under bright fluorescent lights. The language we used was an early version of Fortran. You had to type programs on punch cards, then stack them in the card reader and press a button to load the program into memory and run it. The result would ordinarily be to print something on the spectacularly loud printer. I was puzzled by the 1401. I couldn\\'t figure out what to do with it. And in retrospect there\\'s not much I could have done with it. The only form of input to programs was data stored on punched cards, and I didn\\'t have any data stored on punched cards. The only other option was to do things that didn\\'t rely on any input, like calculate approximations of pi, but I didn\\'t know enough math to do anything interesting of that type. So I\\'m not surprised I can\\'t remember any programs I wrote, because they can\\'t have done much. My clearest memory is of the moment I learned it was possible for programs not to terminate, when one of mine didn\\'t. On a machine without time-sharing, this was a social as well as a technical error, as the data center manager\\'s expression made clear. With microcomputers, everything changed. Now you could have a computer sitting right in front of you, on a desk, that could respond to your keystrokes as it was running instead of just churning through a stack of punch cards and then stopping. [1]\\n\\nThe first of my friends to get a microcomputer built it himself. It was sold as a kit by Heathkit. I remember vividly how impressed and envious I felt watching him sitting in front of it, typing programs right into the computer. Computers were expensive in those days and it took me years of nagging before I convinced my father to buy one, a TRS-80, in about 1980. The gold sta", "num_tokens": 604}, {"title": "Llama Debug Handler", "text": "", "num_tokens": 802}] +[{"title": "Token Counting Handler", "text": "", "num_tokens": 802}] +[{"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 821}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 888}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 849}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 879}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 815}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 892}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 844}, {"title": 
"OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 840}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 876}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 916}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 872}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 853}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 851}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 825}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 832}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 858}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 821}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 922}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 855}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 803}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 814}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 802}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 801}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 813}, {"title": "OpenInference Callback Handler + Arize Phoenix", "text": "", "num_tokens": 811}] +[{"title": "HoneyHive LlamaIndex Tracer", "text": "", "num_tokens": 812}, {"title": "HoneyHive LlamaIndex Tracer", "text": "", "num_tokens": 817}, {"title": "HoneyHive LlamaIndex Tracer", "text": "", "num_tokens": 850}] +[] +[{"title": "Wandb Callback Handler", "text": "", "num_tokens": 809}, {"title": "Wandb Callback Handler", "text": "", "num_tokens": 814}, {"title": "Wandb Callback Handler", "text": "", "num_tokens": 803}] +[{"title": "Guardrails Output Parsing", "text": "", "num_tokens": 822}] +[{"title": "DataFrame Structured Data Extraction", "text": "", "num_tokens": 825}, {"title": "DataFrame Structured Data Extraction", "text": "", "num_tokens": 802}] +[] +[] +[] +[{"title": "Guidance for Sub-Question Query Engine", "text": "", "num_tokens": 806}, {"title": "Guidance for Sub-Question Query Engine", "text": "", "num_tokens": 824}] +[{"title": "OpenAI Pydantic Program", "text": "", "num_tokens": 804}, {"title": "OpenAI Pydantic Program", "text": "", "num_tokens": 810}] +[{"title": "Evaporate Demo", "text": "", "num_tokens": 806}, {"title": "Evaporate Demo", "text": "", "num_tokens": 849}, {"title": "Evaporate Demo", "text": "", "num_tokens": 809}, {"title": "Evaporate Demo", "text": "", "num_tokens": 808}, {"title": "Evaporate Demo", "text": "", "num_tokens": 802}, {"title": "Evaporate Demo", "text": "", "num_tokens": 848}, {"title": "Evaporate Demo", "text": "", "num_tokens": 803}, {"title": "Evaporate Demo", "text": "", "num_tokens": 812}, {"title": "Evaporate Demo", "text": "", "num_tokens": 897}] +[{"title": "Retrieval-Augmented OpenAI Agent", "text": "", "num_tokens": 807}] +[{"title": "ReAct Agent with Query Engine Tools", "text": "", "num_tokens": 802}, {"title": "ReAct Agent with Query Engine Tools", "text": "", "num_tokens": 828}, {"title": "ReAct Agent with Query Engine Tools", "text": "", "num_tokens": 836}, {"title": "ReAct Agent with Query Engine Tools", 
"text": "", "num_tokens": 895}] +[] +[{"title": "Multi-Document Agents (V1)", "text": "", "num_tokens": 801}, {"title": "Multi-Document Agents (V1)", "text": "", "num_tokens": 808}, {"title": "Multi-Document Agents (V1)", "text": "", "num_tokens": 812}, {"title": "Multi-Document Agents (V1)", "text": "", "num_tokens": 821}, {"title": "Multi-Document Agents (V1)", "text": "", "num_tokens": 845}, {"title": "Multi-Document Agents (V1)", "text": "", "num_tokens": 844}] +[{"title": "Context-Augmented OpenAI Agent", "text": "", "num_tokens": 802}, {"title": "Context-Augmented OpenAI Agent", "text": "", "num_tokens": 808}, {"title": "Context-Augmented OpenAI Agent", "text": "", "num_tokens": 814}, {"title": "Context-Augmented OpenAI Agent", "text": "", "num_tokens": 806}, {"title": "Context-Augmented OpenAI Agent", "text": "", "num_tokens": 808}, {"title": "Context-Augmented OpenAI Agent", "text": "", "num_tokens": 822}] +[{"title": "OpenAI Agent + Query Engine Experimental Cookbook", "text": "", "num_tokens": 805}, {"title": "OpenAI Agent + Query Engine Experimental Cookbook", "text": "", "num_tokens": 808}, {"title": "OpenAI Agent + Query Engine Experimental Cookbook", "text": "", "num_tokens": 820}, {"title": "OpenAI Agent + Query Engine Experimental Cookbook", "text": "", "num_tokens": 811}, {"title": "OpenAI Agent + Query Engine Experimental Cookbook", "text": "", "num_tokens": 861}, {"title": "OpenAI Agent + Query Engine Experimental Cookbook", "text": "", "num_tokens": 984}] +[{"title": "ReAct Agent", "text": "", "num_tokens": 801}] +[{"title": "\ud83d\udcac\ud83e\udd16 How to Build a Chatbot", "text": "", "num_tokens": 957}, {"title": "\ud83d\udcac\ud83e\udd16 How to Build a Chatbot", "text": "", "num_tokens": 804}, {"title": "\ud83d\udcac\ud83e\udd16 How to Build a Chatbot", "text": "", "num_tokens": 839}, {"title": "\ud83d\udcac\ud83e\udd16 How to Build a Chatbot", "text": "", "num_tokens": 812}, {"title": "\ud83d\udcac\ud83e\udd16 How to Build a Chatbot", "text": "", "num_tokens": 855}, {"title": "\ud83d\udcac\ud83e\udd16 How to Build a Chatbot", "text": "", "num_tokens": 804}, {"title": "\ud83d\udcac\ud83e\udd16 How to Build a Chatbot", "text": "", "num_tokens": 815}, {"title": "\ud83d\udcac\ud83e\udd16 How to Build a Chatbot", "text": "", "num_tokens": 801}, {"title": "\ud83d\udcac\ud83e\udd16 How to Build a Chatbot", "text": "", "num_tokens": 933}, {"title": "\ud83d\udcac\ud83e\udd16 How to Build a Chatbot", "text": "", "num_tokens": 802}] +[{"title": "OpenAI Agent Query Planning", "text": "", "num_tokens": 805}, {"title": "OpenAI Agent Query Planning", "text": "", "num_tokens": 802}, {"title": "OpenAI Agent Query Planning", "text": "", "num_tokens": 848}, {"title": "OpenAI Agent Query Planning", "text": "", "num_tokens": 802}] +[] +[{"title": "Build your own OpenAI Agent", "text": "", "num_tokens": 806}, {"title": "Build your own OpenAI Agent", "text": "", "num_tokens": 842}, {"title": "Build your own OpenAI Agent", "text": "", "num_tokens": 828}] +[{"title": "Multi-Document Agents", "text": "", "num_tokens": 801}, {"title": "Multi-Document Agents", "text": "", "num_tokens": 810}, {"title": "Multi-Document Agents", "text": "", "num_tokens": 826}, {"title": "Multi-Document Agents", "text": "", "num_tokens": 825}, {"title": "Multi-Document Agents", "text": "", "num_tokens": 846}, {"title": "Multi-Document Agents", "text": "", "num_tokens": 810}, {"title": "Multi-Document Agents", "text": "", "num_tokens": 848}, {"title": "Multi-Document Agents", "text": "", "num_tokens": 867}] 
+[{"title": "Fine Tuning GPT-3.5-Turbo", "text": "", "num_tokens": 808}, {"title": "Fine Tuning GPT-3.5-Turbo", "text": "", "num_tokens": 804}, {"title": "Fine Tuning GPT-3.5-Turbo", "text": " {'role': 'user', 'content': 'Context information is below.\\n---------------------\\npage_label: 410\\nfile_name: IPCC_AR6_WGII_Chapter03.pdf\\n\\nIt is challenging to apply this experimental approach to communities or ecosystems (see Figure \\nBox\\xa03.1.1).To date, most research on community or ecosystem response to climate-induced drivers has been in large-volume (>10,000 l) \\nmesocosms (Riebesell and Gattuso, 2014), or at natural analogues such as CO 2 seeps, in which only one driver (ocean acidification) is \\naltered (see (4) in Figure Box\\xa03.1.1).Only very recently have two drivers been incorporated into climate-change manipulation studies \\nexamining responses of primary producers to secondary consumers (see (5) in Figure Box\\xa03.1.1a; Nagelkerken et\\xa0al., 2020).Therefore, \\n\u2018natural experiments\u2019 from the geological past (Reddin et\\xa0al., 2020) provide insights into how food webs and their constituents respond to \\ncomplex change involving multiple drivers.Contemporary observations are occasionally long enough (>50\\xa0years) to capture community \\nresponses to complex climate change.For example, Brun et\\xa0al.(2019) reported a shift in zooplankton community structure in the North \\nAtlantic (1960\u20132014), with major biogeochemical ramifications.Conducting sufficiently long manipulation experiments to study the effect of adaptation on organisms is equally difficult (see Figure \\nBox\\xa03.1.1b), with much research restricted to multi-year studies of the microevolution of fast-growing (more than one division per day) \\nphytoplankton species responding to single drivers (Lohbeck et\\xa0al., 2012; Schaum et\\xa0al., 2016).In a few experimental evolution studies \\n(see (7) in Figure Box\\xa03.1.1a; Brennan et\\xa0al., 2017), multiple drivers have been used, but none have used communities or ecosystems (see \\nFigure Box\\xa03.1.1b).Nevertheless, the fossil record provides limited evidence of adaptations to less rapid (relative to present day) climate \\nchange (Jackson et\\xa0al., 2018).Despite the need to explore ecological or biogeochemical responses to projected future ocean conditions, \\nlogistical challenges require that assessments of climate-change impacts at scales larger than mesocosms use large-scale, long-term in \\nsitu observational studies (as documented in Section\\xa03.4).\\n\\npage_label: 409\\nfile_name: IPCC_AR6_WGII_Chapter03.pdf\\n\\n3\\n409Oceans and Coastal Ecosystems and Their Services Chapter 3\\nunderlies inhibited thermal adaptation under nitrogen-limited \\nconditions (low confidence) (Aranguren-Gassis et\\xa0 al., 2019).When \\nselection is strong due to unfavourable environmental conditions, \\nmicrobial populations can encounter functional and evolutionary \\ntrade-offs evidenced by reducing growth rates while increasing \\ntolerance and metabolism of reactive oxygen species (Lindberg and \\nCollins, 2020).Other trade-offs can be observed in offspring quality \\nand number (Lindberg and Collins, 2020).These findings contribute \\ntowards a mechanistic framework describing the range of evolutionary \\nstrategies in response to multiple drivers (Collins et\\xa0al., 2020), but other \\nhazards, such as extreme events (e.g., MHWs), still nee", "num_tokens": 633}, {"title": "Fine Tuning GPT-3.5-Turbo", "text": "", "num_tokens": 846}, 
{"title": "Fine Tuning GPT-3.5-Turbo", "text": "", "num_tokens": 812}, {"title": "Fine Tuning GPT-3.5-Turbo", "text": "", "num_tokens": 803}] +[{"title": "Fine Tuning with Function Calling", "text": "", "num_tokens": 967}, {"title": "Fine Tuning with Function Calling", "text": "", "num_tokens": 861}, {"title": "Fine Tuning with Function Calling", "text": "", "num_tokens": 809}, {"title": "Fine Tuning with Function Calling", "text": "", "num_tokens": 802}, {"title": "Fine Tuning with Function Calling", "text": "", "num_tokens": 861}, {"title": "Fine Tuning with Function Calling", "text": "", "num_tokens": 808}] +[{"title": "Finetune Embeddings", "text": "", "num_tokens": 813}, {"title": "Finetune Embeddings", "text": "", "num_tokens": 805}, {"title": "Finetune Embeddings", "text": "", "num_tokens": 813}] +[{"title": "Finetuning an Adapter on Top of any Black-Box Embedding Model", "text": "", "num_tokens": 804}, {"title": "Finetuning an Adapter on Top of any Black-Box Embedding Model", "text": "", "num_tokens": 807}, {"title": "Finetuning an Adapter on Top of any Black-Box Embedding Model", "text": "", "num_tokens": 802}, {"title": "Finetuning an Adapter on Top of any Black-Box Embedding Model", "text": "", "num_tokens": 801}] +[{"title": "Fine-tuning with Retrieval Augmentation", "text": "", "num_tokens": 809}, {"title": "Fine-tuning with Retrieval Augmentation", "text": "", "num_tokens": 806}, {"title": "Fine-tuning with Retrieval Augmentation", "text": "", "num_tokens": 824}, {"title": "Fine-tuning with Retrieval Augmentation", "text": "", "num_tokens": 805}] +[{"title": "Fine-tuning to Memorize Knowledge", "text": "", "num_tokens": 807}, {"title": "Fine-tuning to Memorize Knowledge", "text": "", "num_tokens": 805}, {"title": "Fine-tuning to Memorize Knowledge", "text": "", "num_tokens": 814}, {"title": "Fine-tuning to Memorize Knowledge", "text": "", "num_tokens": 821}, {"title": "Fine-tuning to Memorize Knowledge", "text": "", "num_tokens": 803}, {"title": "Fine-tuning to Memorize Knowledge", "text": "", "num_tokens": 882}, {"title": "Fine-tuning to Memorize Knowledge", "text": "", "num_tokens": 809}, {"title": "Fine-tuning to Memorize Knowledge", "text": "", "num_tokens": 830}, {"title": "Fine-tuning to Memorize Knowledge", "text": "", "num_tokens": 806}] +[{"title": "How to Finetune a cross-encoder using LLamaIndex", "text": "", "num_tokens": 867}, {"title": "How to Finetune a cross-encoder using LLamaIndex", "text": "", "num_tokens": 812}, {"title": "How to Finetune a cross-encoder using LLamaIndex", "text": "", "num_tokens": 804}, {"title": "How to Finetune a cross-encoder using LLamaIndex", "text": "", "num_tokens": 808}, {"title": "How to Finetune a cross-encoder using LLamaIndex", "text": "", "num_tokens": 802}, {"title": "How to Finetune a cross-encoder using LLamaIndex", "text": "", "num_tokens": 811}, {"title": "How to Finetune a cross-encoder using LLamaIndex", "text": "", "num_tokens": 824}, {"title": "How to Finetune a cross-encoder using LLamaIndex", "text": "", "num_tokens": 835}, {"title": "How to Finetune a cross-encoder using LLamaIndex", "text": "", "num_tokens": 805}, {"title": "How to Finetune a cross-encoder using LLamaIndex", "text": "", "num_tokens": 807}, {"title": "How to Finetune a cross-encoder using LLamaIndex", "text": "", "num_tokens": 802}, {"title": "How to Finetune a cross-encoder using LLamaIndex", "text": "", "num_tokens": 809}] +[{"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 801}, {"title": 
"Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 808}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 839}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 813}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 826}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 875}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 837}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 812}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 828}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 816}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 832}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 807}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 806}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 823}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 921}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 854}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 922}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 841}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 855}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 818}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 839}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 806}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 819}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 896}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 802}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 833}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 804}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 832}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 809}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 814}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 826}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 819}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 859}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 803}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of 
Thought", "text": "", "num_tokens": 852}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 812}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 817}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 815}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 906}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 841}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 914}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 941}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 807}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 838}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 866}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 805}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 827}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 804}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 810}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 841}, {"title": "Fine-tuning a gpt-3.5 ReAct Agent on Better Chain of Thought", "text": "", "num_tokens": 815}] +[] +[{"title": "HyDE Query Transform", "text": "", "num_tokens": 825}, {"title": "HyDE Query Transform", "text": "", "num_tokens": 812}] +[{"title": "Discord Thread Management", "text": "", "num_tokens": 822}, {"title": "Discord Thread Management", "text": "", "num_tokens": 802}, {"title": "Discord Thread Management", "text": "", "num_tokens": 852}, {"title": "Discord Thread Management", "text": "", "num_tokens": 822}] +[{"title": "Building RAG from Scratch (Open-source only!)", "text": "", "num_tokens": 801}, {"title": "Building RAG from Scratch (Open-source only!)", "text": "", "num_tokens": 802}, {"title": "Building RAG from Scratch (Open-source only!)", "text": "", "num_tokens": 828}] +[{"title": "Building Retrieval from Scratch", "text": "", "num_tokens": 804}] +[{"title": "Building Response Synthesis from Scratch", "text": "", "num_tokens": 808}, {"title": "Building Response Synthesis from Scratch", "text": "", "num_tokens": 807}, {"title": "Building Response Synthesis from Scratch", "text": "", "num_tokens": 810}, {"title": "Building Response Synthesis from Scratch", "text": "", "num_tokens": 811}, {"title": "Building Response Synthesis from Scratch", "text": "", "num_tokens": 824}, {"title": "Building Response Synthesis from Scratch", "text": "", "num_tokens": 811}, {"title": "Building Response Synthesis from Scratch", "text": "", "num_tokens": 802}] +[{"title": "Building a (Very Simple) Vector Store from Scratch", "text": "", "num_tokens": 810}, {"title": "Building a (Very Simple) Vector Store from Scratch", "text": "", "num_tokens": 805}, {"title": "Building a (Very Simple) Vector Store from Scratch", "text": "", "num_tokens": 808}, {"title": "Building a (Very Simple) Vector Store from Scratch", "text": "", "num_tokens": 809}, {"title": "Building a (Very Simple) Vector Store 
from Scratch", "text": "", "num_tokens": 803}] +[{"title": "Building Data Ingestion from Scratch", "text": "", "num_tokens": 801}, {"title": "Building Data Ingestion from Scratch", "text": "", "num_tokens": 805}] +[{"title": "Building Evaluation from Scratch", "text": "", "num_tokens": 801}, {"title": "Building Evaluation from Scratch", "text": "", "num_tokens": 827}, {"title": "Building Evaluation from Scratch", "text": "", "num_tokens": 802}, {"title": "Building Evaluation from Scratch", "text": "", "num_tokens": 805}, {"title": "Building Evaluation from Scratch", "text": "", "num_tokens": 814}] +[{"title": "Building a Router from Scratch", "text": "", "num_tokens": 817}, {"title": "Building a Router from Scratch", "text": "", "num_tokens": 811}, {"title": "Building a Router from Scratch", "text": "", "num_tokens": 819}, {"title": "Building a Router from Scratch", "text": "", "num_tokens": 835}] +[{"title": "import nest_asyncio", "text": "", "num_tokens": 810}] +[] +[] +[] +[{"title": "Redis Docstore+Index Store Demo", "text": "", "num_tokens": 807}, {"title": "Redis Docstore+Index Store Demo", "text": "", "num_tokens": 812}, {"title": "Redis Docstore+Index Store Demo", "text": "", "num_tokens": 813}] +[{"title": "LongContextReorder", "text": "", "num_tokens": 817}] +[{"title": "File Based Node Parsers", "text": "", "num_tokens": 804}, {"title": "File Based Node Parsers", "text": "", "num_tokens": 802}, {"title": "File Based Node Parsers", "text": "", "num_tokens": 808}, {"title": "File Based Node Parsers", "text": " [TextNode(id_='e6236169-45a1-4699-9762-c8d3d89f8fa0', embedding=None, metadata={'filename': 'README.md', 'extension': '.md', 'Header 1': '\ud83d\uddc2\ufe0f LlamaIndex \ud83e\udd99'}, excluded_embed_metadata_keys=[], excluded_llm_metadata_keys=[], relationships={: RelatedNodeInfo(node_id='e7bc328f-85c1-430a-9772-425e59909a58', node_type=None, metadata={'filename': 'README.md', 'extension': '.md', 'Header 1': '\ud83d\uddc2\ufe0f LlamaIndex \ud83e\udd99'}, hash='e538ad7c04f635f1c707eba290b55618a9f0942211c4b5ca2a4e54e1fdf04973'), : RelatedNodeInfo(node_id='51b40b54-dfd3-48ed-b377-5ca58a0f48a3', node_type=None, metadata={'filename': 'README.md', 'extension': '.md', 'Header 1': '\ud83d\uddc2\ufe0f LlamaIndex \ud83e\udd99'}, hash='ca9e3590b951f1fca38687fd12bb43fbccd0133a38020c94800586b3579c3218')}, hash='ec733c85ad1dca248ae583ece341428ee20e4d796bc11adea1618c8e4ed9246a', text='\ud83d\uddc2\ufe0f LlamaIndex \ud83e\udd99\\n[![PyPI - Downloads](https://img.shields.io/pypi/dm/llama-index)](https://pypi.org/project/llama-index/)\\n[![GitHub contributors](https://img.shields.io/github/contributors/jerryjliu/llama_index)](https://github.com/jerryjliu/llama_index/graphs/contributors)\\n[![Discord](https://img.shields.io/discord/1059199217496772688)](https://discord.gg/dGcwcsnxhU)', start_char_idx=None, end_char_idx=None, text_template='{metadata_str}\\n\\n{content}', metadata_template='{key}: {value}', metadata_seperator='\\n'), TextNode(id_='51b40b54-dfd3-48ed-b377-5ca58a0f48a3', embedding=None, metadata={'filename': 'README.md', 'extension': '.md', 'Header 1': '\ud83d\uddc2\ufe0f LlamaIndex \ud83e\udd99'}, excluded_embed_metadata_keys=[], excluded_llm_metadata_keys=[], relationships={: RelatedNodeInfo(node_id='e7bc328f-85c1-430a-9772-425e59909a58', node_type=None, metadata={'filename': 'README.md', 'extension': '.md', 'Header 1': '\ud83d\uddc2\ufe0f LlamaIndex \ud83e\udd99'}, hash='e538ad7c04f635f1c707eba290b55618a9f0942211c4b5ca2a4e54e1fdf04973'), : 
RelatedNodeInfo(node_id='e6236169-45a1-4699-9762-c8d3d89f8fa0', node_type=None, metadata={'filename': 'README.md', 'extension': '.md', 'Header 1': '\ud83d\uddc2\ufe0f LlamaIndex \ud83e\udd99'}, hash='ec733c85ad1dca248ae583ece341428ee20e4d796bc11adea1618c8e4ed9246a')}, hash='ca9e3590b951f1fca38687fd12bb43fbccd0133a38020c94800586b3579c3218', text='LlamaIndex (GPT Index) is a data framework for your LLM application.\\n\\nPyPI: \\n- LlamaIndex: https://pypi.org/project/llama-index/.\\n- GPT Index (duplicate): https://pypi.org/project/gpt-index/.\\n\\nLlamaIndex.TS (Typescript/Javascript): https://github.com/run-llama/LlamaIndexTS.\\n\\nDocumentation: https://gpt-index.readthedocs.io/.\\n\\nTwitter: https://twitter.com/llama_index.\\n\\nDiscord: https://discord.gg/dGcwcsnxhU.', start_char_idx=None, end_char_idx=None, text_template='{metadata_str}\\n\\n{content}', metadata_template='{key}: {value}', metadata_seperator='\\n'), TextNode(id_='ce269047-4718-4a08-b170-34fef19cdafe', embedding=None, metadata={'filename': 'README.md', 'extension': '.md', 'Header 1': '\ud83d\uddc2\ufe0f LlamaIndex \ud83e\udd99', 'Header 3': 'Ecosystem'}, excluded_embed_metadata_keys=[], excluded_llm_metadata_keys=[], relationships={: RelatedNodeInfo(node_id='953934dc-dd4f-4069-9e2a-326ee8a593bf', node_type=None, metadata=", "num_tokens": 67}] +[{"title": "LLM Reranker Demonstration (2021 Lyft 10-k)", "text": "", "num_tokens": 838}] +[{"title": "PII Masking", "text": "", "num_tokens": 810}] +[{"title": "Time-Weighted Rerank", "text": "", "num_tokens": 815}] +[{"title": "Forward/Backward Augmentation", "text": "", "num_tokens": 813}] +[{"title": "Metadata Replacement + Node Sentence Window", "text": "", "num_tokens": 802}, {"title": "Metadata Replacement + Node Sentence Window", "text": "", "num_tokens": 815}, {"title": "Metadata Replacement + Node Sentence Window", "text": "", "num_tokens": 810}, {"title": "Metadata Replacement + Node Sentence Window", "text": "", "num_tokens": 806}, {"title": "Metadata Replacement + Node Sentence Window", "text": "", "num_tokens": 806}] +[{"title": "Rerank can speed up an LLM query without sacrificing accuracy (and in", "text": "", "num_tokens": 1012}, {"title": "Rerank can speed up an LLM query without sacrificing accuracy (and in", "text": "", "num_tokens": 811}] +[] +[{"title": "Cohere Rerank", "text": "", "num_tokens": 806}] +[{"title": "LLM Reranker Demonstration (Great Gatsby)", "text": "", "num_tokens": 807}, {"title": "LLM Reranker Demonstration (Great Gatsby)", "text": "", "num_tokens": 803}, {"title": "LLM Reranker Demonstration (Great Gatsby)", "text": "", "num_tokens": 815}] +[{"title": "Recency Filtering", "text": "", "num_tokens": 804}] +[] +[] +[{"title": "Refine with Structured Answer Filtering", "text": "", "num_tokens": 813}] +[] +[{"title": "LlamaIndex + DeepEval Integration", "text": "", "num_tokens": 839}] +[{"title": "Relevancy Evaluator", "text": "", "num_tokens": 801}] +[] +[{"title": "QuestionGeneration", "text": "", "num_tokens": 812}, {"title": "QuestionGeneration", "text": "", "num_tokens": 804}] +[{"title": "HotpotQADistractor Demo", "text": "", "num_tokens": 806}] +[{"title": "Pairwise Evaluator", "text": "", "num_tokens": 835}, {"title": "Pairwise Evaluator", "text": "", "num_tokens": 842}] +[] +[{"title": "Self Correcting Query Engines - Evaluation & Retry", "text": "", "num_tokens": 808}] +[] +[] +[{"title": "BatchEvalRunner - Running Multiple Evaluations", "text": "", "num_tokens": 815}, {"title": "BatchEvalRunner - Running Multiple Evaluations", 
"text": " [\"== Demographics ==\\n\\nNew York City is the most populous city in the United States, with 8,804,190 residents incorporating more immigration into the city than outmigration since the 2010 United States census. More than twice as many people live in New York City as compared to Los Angeles, the second-most populous U.S. city; and New York has more than three times the population of Chicago, the third-most populous U.S. city. New York City gained more residents between 2010 and 2020 (629,000) than any other U.S. city, and a greater amount than the total sum of the gains over the same decade of the next four largest U.S. cities, Los Angeles, Chicago, Houston, and Phoenix, Arizona combined. New York City's population is about 44% of New York State's population, and about 39% of the population of the New York metropolitan area. The majority of New York City residents in 2020 (5,141,538, or 58.4%) were living on Long Island, in Brooklyn, or in Queens. The New York City metropolitan statistical area, has the largest foreign-born population of any metropolitan region in the world. The New York region continues to be by far the leading metropolitan gateway for legal immigrants admitted into the United States, substantially exceeding the combined totals of Los Angeles and Miami.\\n\\n\\n=== Population density ===\\n\\nIn 2020, the city had an estimated population density of 29,302.37 inhabitants per square mile (11,313.71/km2), rendering it the nation's most densely populated of all larger municipalities (those with more than 100,000 residents), with several small cities (of fewer than 100,000) in adjacent Hudson County, New Jersey having greater density, as per the 2010 census. Geographically co-extensive with New York County, the borough of Manhattan's 2017 population density of 72,918 inhabitants per square mile (28,154/km2) makes it the highest of any county in the United States and higher than the density of any individual American city. The next three densest counties in the United States, placing second through fourth, are also New York boroughs: Brooklyn, the Bronx, and Queens respectively.\", \"New York, often called New York City or NYC, is the most populous city in the United States. With a 2020 population of 8,804,190 distributed over 300.46 square miles (778.2 km2), New York City is the most densely populated major city in the United States and more than twice as populous as Los Angeles, the nation's second-largest city. New York City is located at the southern tip of New York State. It constitutes the geographical and demographic center of both the Northeast megalopolis and the New York metropolitan area, the largest metropolitan area in the U.S. by both population and urban area. With over 20.1 million people in its metropolitan statistical area and 23.5 million in its combined statistical area as of 2020, New York is one of the world's most populous megacities, and over 58 million people live within 250 mi (400 km) of the city. 
New York City is a global cultural, financial, entertainment, and media center with a significant influence on commerce, health care and life sciences, research, technology, education, politics, tourism, dining, art", "num_tokens": 0}] +[{"title": "Faithfulness Evaluator", "text": "", "num_tokens": 802}, {"title": "Faithfulness Evaluator", "text": "", "num_tokens": 813}, {"title": "Faithfulness Evaluator", "text": "", "num_tokens": 805}] +[{"title": "Retrieval Evaluation", "text": "", "num_tokens": 804}, {"title": "Retrieval Evaluation", "text": "", "num_tokens": 801}] +[{"title": "Chat Engine - Simple Mode REPL", "text": "", "num_tokens": 806}] +[{"title": "Chat Engine - Context Mode", "text": "", "num_tokens": 837}] +[] +[{"title": "Chat Engine - ReAct Agent Mode", "text": "", "num_tokens": 823}] +[] +[] +[] +[{"title": "[Beta] Text-to-SQL with PGVector", "text": "", "num_tokens": 803}, {"title": "[Beta] Text-to-SQL with PGVector", "text": "", "num_tokens": 806}] +[{"title": "Router Query Engine", "text": "", "num_tokens": 807}, {"title": "Router Query Engine", "text": " Response(response=\"\\nThis document is a reflection on the author's experiences with computers and writing, from his early days of programming on an IBM 1401 to his more recent work on a web application builder. He recounts his experiences with programming, painting, and starting companies, and how he eventually came to write essays about his life and the choices he made.\", source_nodes=[NodeWithScore(node=Node(text='\\t\\t\\n\\nWhat I Worked On\\n\\nFebruary 2021\\n\\nBefore college the two main things I worked on, outside of school, were writing and programming. I didn\\'t write essays. I wrote what beginning writers were supposed to write then, and probably still are: short stories. My stories were awful. They had hardly any plot, just characters with strong feelings, which I imagined made them deep.\\n\\nThe first programs I tried writing were on the IBM 1401 that our school district used for what was then called \"data processing.\" This was in 9th grade, so I was 13 or 14. The school district\\'s 1401 happened to be in the basement of our junior high school, and my friend Rich Draves and I got permission to use it. It was like a mini Bond villain\\'s lair down there, with all these alien-looking machines \u2014 CPU, disk drives, printer, card reader \u2014 sitting up on a raised floor under bright fluorescent lights.\\n\\nThe language we used was an early version of Fortran. You had to type programs on punch cards, then stack them in the card reader and press a button to load the program into memory and run it. The result would ordinarily be to print something on the spectacularly loud printer.\\n\\nI was puzzled by the 1401. I couldn\\'t figure out what to do with it. And in retrospect there\\'s not much I could have done with it. The only form of input to programs was data stored on punched cards, and I didn\\'t have any data stored on punched cards. The only other option was to do things that didn\\'t rely on any input, like calculate approximations of pi, but I didn\\'t know enough math to do anything interesting of that type. So I\\'m not surprised I can\\'t remember any programs I wrote, because they can\\'t have done much. My clearest memory is of the moment I learned it was possible for programs not to terminate, when one of mine didn\\'t. 
On a machine without time-sharing, this was a social as well as a technical error, as the data center manager\\'s expression made clear.\\n\\nWith microcomputers, everything changed. Now you could have a computer sitting right in front of you, on a desk, that could respond to your keystrokes as it was running instead of just churning through a stack of punch cards and then stopping. [1]\\n\\nThe first of my friends to get a microcomputer built it himself. It was sold as a kit by Heathkit. I remember vividly how impressed and envious I felt watching him sitting in front of it, typing programs right into the computer.\\n\\nComputers were expensive in those days and it took me years of nagging before I convinced my father to buy one, a TRS-80, in about 1980. The gold standard then was the Apple II, but a TRS-80 was good enough. This was when I really started programming. I wrote simple games, a program to predict how high my model rockets", "num_tokens": 135}, {"title": "Router Query Engine", "text": " Response(response=\"\\nThis document is a reflection on the author's experiences with computers and writing, from his early days of programming on an IBM 1401 to his more recent work on a web application builder. He recounts his experiences with programming, painting, and starting companies, and how he eventually came to write essays about his life and the choices he made.\", source_nodes=[NodeWithScore(node=Node(text='\\t\\t\\n\\nWhat I Worked On\\n\\nFebruary 2021\\n\\nBefore college the two main things I worked on, outside of school, were writing and programming. I didn\\'t write essays. I wrote what beginning writers were supposed to write then, and probably still are: short stories. My stories were awful. They had hardly any plot, just characters with strong feelings, which I imagined made them deep.\\n\\nThe first programs I tried writing were on the IBM 1401 that our school district used for what was then called \"data processing.\" This was in 9th grade, so I was 13 or 14. The school district\\'s 1401 happened to be in the basement of our junior high school, and my friend Rich Draves and I got permission to use it. It was like a mini Bond villain\\'s lair down there, with all these alien-looking machines \u2014 CPU, disk drives, printer, card reader \u2014 sitting up on a raised floor under bright fluorescent lights.\\n\\nThe language we used was an early version of Fortran. You had to type programs on punch cards, then stack them in the card reader and press a button to load the program into memory and run it. The result would ordinarily be to print something on the spectacularly loud printer.\\n\\nI was puzzled by the 1401. I couldn\\'t figure out what to do with it. And in retrospect there\\'s not much I could have done with it. The only form of input to programs was data stored on punched cards, and I didn\\'t have any data stored on punched cards. The only other option was to do things that didn\\'t rely on any input, like calculate approximations of pi, but I didn\\'t know enough math to do anything interesting of that type. So I\\'m not surprised I can\\'t remember any programs I wrote, because they can\\'t have done much. My clearest memory is of the moment I learned it was possible for programs not to terminate, when one of mine didn\\'t. On a machine without time-sharing, this was a social as well as a technical error, as the data center manager\\'s expression made clear.\\n\\nWith microcomputers, everything changed. 
Now you could have a computer sitting right in front of you, on a desk, that could respond to your keystrokes as it was running instead of just churning through a stack of punch cards and then stopping. [1]\\n\\nThe first of my friends to get a microcomputer built it himself. It was sold as a kit by Heathkit. I remember vividly how impressed and envious I felt watching him sitting in front of it, typing programs right into the computer.\\n\\nComputers were expensive in those days and it took me years of nagging before I convinced my father to buy one, a TRS-80, in about 1980. The gold standard then was the Apple II, but a TRS-80 was good enough. This was when I really started programming. I wrote simple games, a program to predict how high my model rockets", "num_tokens": 307}, {"title": "Router Query Engine", "text": " Response(response=\"\\nAfter RICS, Paul Graham decided to focus on Y Combinator and help get the startups through Demo Day. He also started writing essays again and wrote a few that weren't about startups. In November 2014, he ran out of steam while painting and stopped working on it. He then started working on Lisp again in March 2015.\", source_nodes=[NodeWithScore(node=Node(text=\"of my attention. It had already eaten Arc, and was in the process of eating essays too. Either YC was my life's work or I'd have to leave eventually. And it wasn't, so I would.\\n\\nIn the summer of 2012 my mother had a stroke, and the cause turned out to be a blood clot caused by colon cancer. The stroke destroyed her balance, and she was put in a nursing home, but she really wanted to get out of it and back to her house, and my sister and I were determined to help her do it. I used to fly up to Oregon to visit her regularly, and I had a lot of time to think on those flights. On one of them I realized I was ready to hand YC over to someone else.\\n\\nI asked Jessica if she wanted to be president, but she didn't, so we decided we'd try to recruit Sam Altman. We talked to Robert and Trevor and we agreed to make it a complete changing of the guard. Up till that point YC had been controlled by the original LLC we four had started. But we wanted YC to last for a long time, and to do that it couldn't be controlled by the founders. So if Sam said yes, we'd let him reorganize YC. Robert and I would retire, and Jessica and Trevor would become ordinary partners.\\n\\nWhen we asked Sam if he wanted to be president of YC, initially he said no. He wanted to start a startup to make nuclear reactors. But I kept at it, and in October 2013 he finally agreed. We decided he'd take over starting with the winter 2014 batch. For the rest of 2013 I left running YC more and more to Sam, partly so he could learn the job, and partly because I was focused on my mother, whose cancer had returned.\\n\\nShe died on January 15, 2014. We knew this was coming, but it was still hard when it did.\\n\\nI kept working on YC till March, to help get that batch of startups through Demo Day, then I checked out pretty completely. (I still talk to alumni and to new startups working on things I'm interested in, but that only takes a few hours a week.)\\n\\nWhat should I do next? Rtm's advice hadn't included anything about that. I wanted to do something completely different, so I decided I'd paint. I wanted to see how good I could get if I really focused on it. So the day after I stopped working on YC, I started painting. I was rusty and it took a while to get back into shape, but it was at least completely engaging. 
[18]\\n\\nI spent most of the rest of 2014 painting. I'd never been able to work so uninterruptedly before, and I got to be better than I had been. Not good enough, but better. Then in November, right in the middle of a painting, I ran out of steam. Up till that point I'd always been curious to see how the painting I was working on would turn out, but suddenly finishing this one seemed like a chore. So I stopped working on it and cleaned my brushes and haven't painted since. So far anyway.\\n\\nI realize that ", "num_tokens": 406}, {"title": "Router Query Engine", "text": "", "num_tokens": 808}, {"title": "Router Query Engine", "text": " Response(response=\"\\nNotable events and people from the author's time at Interleaf and YC include: \\n\\nInterleaf: \\n- Founding of Interleaf in 1989\\n- Acquisition of Interleaf by Lernout & Hauspie in 1999\\n- The author's work on Lisp, which led to the development of the Lisp programming language. \\n- The author's work on Arc, which led to the development of the Hacker News website. \\n\\nYC: \\n- Founding of YC in 2005\\n- Launch of Hacker News in 2006\\n- Recruitment of Sam Altman as President in 2013\\n- The author's work with Robert Morris, Trevor Blackwell, and Jessica Livingston to create Y Combinator. \\n- The author's work with Sam Altman to reorganize YC and make it a lasting organization. \\n- The author's work with YC startups to help them succeed. \\n- The author's work on Hacker News, which became a major source of stress. \\n- The author's work on internal software for YC, written in Arc. \\n- The author's work with Kevin Hale, who offered the author unsolicited advice. \\n- The author's mother's stroke and death in 2012 and 2014 respectively\\n- Author's retirement from YC in 2014\\n- Author's decision to take up painting in 2014\\n- Author's return to writing essays and Lisp in 2015\", source_nodes=[NodeWithScore(node=Node(text=\"\\nNotable events and people from the author's time at Interleaf and YC include: \\n\\nInterleaf: \\n- Founding of Interleaf in 1989\\n- Acquisition of Interleaf by Lernout & Hauspie in 1999\\n\\nYC: \\n- Founding of YC in 2005\\n- Launch of Hacker News in 2006\\n- Recruitment of Sam Altman as President in 2013\\n- Author's mother's stroke and death in 2012 and 2014 respectively\\n- Author's retirement from YC in 2014\\n- Author's decision to take up painting in 2014\\n- Author's return to writing essays and Lisp in 2015\", doc_id='cd546791-d1e2-420a-9e9c-fde68d2d51dd', embedding=None, doc_hash='0e61517dfdb144c42c1251f3ed80d58fa2c3859a03f9d7a9ae92d513036690c5', extra_info=None, node_info={'start': 0, 'end': 498, '_node_type': }, relationships={: '4183ef8b-b14b-4c73-9754-864d64842c1b'}), score=None), NodeWithScore(node=Node(text=\"\\nNotable events and people from the author's time at Interleaf and YC include: \\n\\nInterleaf: \\n- The author's work on Lisp, which led to the development of the Lisp programming language. \\n- The author's work on Arc, which led to the development of the Hacker News website. \\n\\nYC: \\n- The author's work with Robert Morris, Trevor Blackwell, and Jessica Livingston to create Y Combinator. \\n- The author's work with Sam Altman to reorganize YC and make it a lasting organization. \\n- The author's work with YC startups to help them succeed. \\n- The author's work on Hacker News, which became a major source of stress. \\n- The author's work on internal software for YC, written in Arc. 
\\n- The author's work with Kevin Hale, who offered the author unsolicited advice.\", doc_id='cee04688-dbe7-4749-809e-5a3723e61ac7', embedding=None, doc_hash='246f0f5349eab9d4639f1584170456843b8bd47fcf2862c88437e976309e3a57', extra_info=None, node_info={'start': 0, 'end': 755, '_node_type': }, relationships={: '283de7d5-81ed-4dc", "num_tokens": 112}] +[{"title": "Recursive Retriever + Document Agents", "text": "", "num_tokens": 807}, {"title": "Recursive Retriever + Document Agents", "text": "", "num_tokens": 806}, {"title": "Recursive Retriever + Document Agents", "text": "", "num_tokens": 804}, {"title": "Recursive Retriever + Document Agents", "text": "", "num_tokens": 914}] +[{"title": "Joint QA Summary Query Engine", "text": "", "num_tokens": 807}] +[{"title": "Knowledge Graph RAG Query Engine", "text": "", "num_tokens": 802}, {"title": "Knowledge Graph RAG Query Engine", "text": "", "num_tokens": 812}, {"title": "Knowledge Graph RAG Query Engine", "text": "", "num_tokens": 804}, {"title": "Knowledge Graph RAG Query Engine", "text": "", "num_tokens": 809}, {"title": "Knowledge Graph RAG Query Engine", "text": "", "num_tokens": 801}, {"title": "Knowledge Graph RAG Query Engine", "text": "", "num_tokens": 835}, {"title": "Knowledge Graph RAG Query Engine", "text": "", "num_tokens": 807}, {"title": "Knowledge Graph RAG Query Engine", "text": "", "num_tokens": 802}, {"title": "Knowledge Graph RAG Query Engine", "text": "", "num_tokens": 801}, {"title": "Knowledge Graph RAG Query Engine", "text": "", "num_tokens": 802}, {"title": "Knowledge Graph RAG Query Engine", "text": "", "num_tokens": 802}, {"title": "Knowledge Graph RAG Query Engine", "text": "", "num_tokens": 801}, {"title": "Knowledge Graph RAG Query Engine", "text": "", "num_tokens": 804}, {"title": "Knowledge Graph RAG Query Engine", "text": "", "num_tokens": 803}] +[{"title": "Knowledge Graph Query Engine", "text": "", "num_tokens": 802}, {"title": "Knowledge Graph Query Engine", "text": "", "num_tokens": 874}, {"title": "Knowledge Graph Query Engine", "text": "", "num_tokens": 823}, {"title": "Knowledge Graph Query Engine", "text": "", "num_tokens": 823}, {"title": "Knowledge Graph Query Engine", "text": "", "num_tokens": 836}, {"title": "Knowledge Graph Query Engine", "text": "", "num_tokens": 801}, {"title": "Knowledge Graph Query Engine", "text": "", "num_tokens": 810}] +[{"title": "SQL Auto Vector Query Engine", "text": "", "num_tokens": 803}, {"title": "SQL Auto Vector Query Engine", "text": "", "num_tokens": 836}, {"title": "SQL Auto Vector Query Engine", "text": "", "num_tokens": 823}, {"title": "SQL Auto Vector Query Engine", "text": "", "num_tokens": 916}, {"title": "SQL Auto Vector Query Engine", "text": "", "num_tokens": 900}, {"title": "SQL Auto Vector Query Engine", "text": "", "num_tokens": 808}] +[{"title": "JSON Query Engine", "text": "", "num_tokens": 806}] +[{"title": "Query Engine with Pydantic Outputs", "text": "", "num_tokens": 846}] +[{"title": "Sub Question Query Engine", "text": "", "num_tokens": 921}, {"title": "Sub Question Query Engine", "text": "", "num_tokens": 839}] +[{"title": "Defining a Custom Query Engine", "text": "", "num_tokens": 803}] +[{"title": "Retriever Router Query Engine", "text": "", "num_tokens": 802}, {"title": "Retriever Router Query Engine", "text": "", "num_tokens": 803}, {"title": "Retriever Router Query Engine", "text": "", "num_tokens": 945}] +[{"title": "SQL Join Query Engine", "text": "", "num_tokens": 801}, {"title": "SQL Join Query Engine", "text": "", "num_tokens": 
807}, {"title": "SQL Join Query Engine", "text": "", "num_tokens": 837}, {"title": "SQL Join Query Engine", "text": "", "num_tokens": 890}, {"title": "SQL Join Query Engine", "text": "", "num_tokens": 993}, {"title": "SQL Join Query Engine", "text": "", "num_tokens": 825}, {"title": "SQL Join Query Engine", "text": "", "num_tokens": 814}, {"title": "SQL Join Query Engine", "text": "", "num_tokens": 867}, {"title": "SQL Join Query Engine", "text": "", "num_tokens": 814}] +[] +[{"title": "CitationQueryEngine", "text": "", "num_tokens": 953}, {"title": "CitationQueryEngine", "text": "", "num_tokens": 899}] +[{"title": "Retriever Query Engine with Custom Retrievers - Simple Hybrid Search", "text": "", "num_tokens": 801}, {"title": "Retriever Query Engine with Custom Retrievers - Simple Hybrid Search", "text": "", "num_tokens": 805}] +[{"title": "Ensemble Query Engine Guide", "text": "", "num_tokens": 813}, {"title": "Ensemble Query Engine Guide", "text": "", "num_tokens": 803}, {"title": "Ensemble Query Engine Guide", "text": "", "num_tokens": 806}] +[{"title": "FLARE Query Engine", "text": "", "num_tokens": 827}, {"title": "FLARE Query Engine", "text": "", "num_tokens": 818}, {"title": "FLARE Query Engine", "text": "", "num_tokens": 979}, {"title": "FLARE Query Engine", "text": "", "num_tokens": 818}] +[{"title": "SQL Router Query Engine", "text": "", "num_tokens": 822}, {"title": "SQL Router Query Engine", "text": "", "num_tokens": 806}, {"title": "SQL Router Query Engine", "text": "", "num_tokens": 806}] +[{"title": "Recursive Retriever + Query Engine Demo", "text": "", "num_tokens": 806}, {"title": "Recursive Retriever + Query Engine Demo", "text": "", "num_tokens": 814}, {"title": "Recursive Retriever + Query Engine Demo", "text": "", "num_tokens": 827}] +[{"title": "Joint Tabular/Semantic QA over Tesla 10K", "text": "", "num_tokens": 810}, {"title": "Joint Tabular/Semantic QA over Tesla 10K", "text": "", "num_tokens": 804}, {"title": "Joint Tabular/Semantic QA over Tesla 10K", "text": "", "num_tokens": 926}, {"title": "Joint Tabular/Semantic QA over Tesla 10K", "text": "", "num_tokens": 878}, {"title": "Joint Tabular/Semantic QA over Tesla 10K", "text": "", "num_tokens": 801}, {"title": "Joint Tabular/Semantic QA over Tesla 10K", "text": "", "num_tokens": 824}] +[{"title": "FalkorDB Graph Store", "text": "", "num_tokens": 801}] +[{"title": "Custom Retriever combining KG Index and VectorStore Index", "text": "", "num_tokens": 809}, {"title": "Custom Retriever combining KG Index and VectorStore Index", "text": "", "num_tokens": 809}, {"title": "Custom Retriever combining KG Index and VectorStore Index", "text": "", "num_tokens": 814}, {"title": "Custom Retriever combining KG Index and VectorStore Index", "text": "", "num_tokens": 815}, {"title": "Custom Retriever combining KG Index and VectorStore Index", "text": "", "num_tokens": 803}, {"title": "Custom Retriever combining KG Index and VectorStore Index", "text": "", "num_tokens": 819}, {"title": "Custom Retriever combining KG Index and VectorStore Index", "text": "", "num_tokens": 801}, {"title": "Custom Retriever combining KG Index and VectorStore Index", "text": "", "num_tokens": 808}, {"title": "Custom Retriever combining KG Index and VectorStore Index", "text": "", "num_tokens": 807}, {"title": "Custom Retriever combining KG Index and VectorStore Index", "text": "", "num_tokens": 803}, {"title": "Custom Retriever combining KG Index and VectorStore Index", "text": "", "num_tokens": 812}] +[{"title": "Knowledge Graph Construction w/ 
WikiData Filtering", "text": "", "num_tokens": 807}, {"title": "Knowledge Graph Construction w/ WikiData Filtering", "text": "", "num_tokens": 815}, {"title": "Knowledge Graph Construction w/ WikiData Filtering", "text": "", "num_tokens": 833}, {"title": "Knowledge Graph Construction w/ WikiData Filtering", "text": "", "num_tokens": 803}, {"title": "Knowledge Graph Construction w/ WikiData Filtering", "text": "", "num_tokens": 807}] +[{"title": "Neo4j Graph Store", "text": "", "num_tokens": 818}, {"title": "Neo4j Graph Store", "text": "", "num_tokens": 802}, {"title": "Neo4j Graph Store", "text": "", "num_tokens": 850}] +[{"title": "K\u00f9zu Graph Store", "text": "", "num_tokens": 804}, {"title": "K\u00f9zu Graph Store", "text": "", "num_tokens": 802}, {"title": "K\u00f9zu Graph Store", "text": "", "num_tokens": 814}, {"title": "K\u00f9zu Graph Store", "text": "", "num_tokens": 855}, {"title": "K\u00f9zu Graph Store", "text": "", "num_tokens": 811}] +[{"title": "Nebula Graph Store", "text": "", "num_tokens": 823}, {"title": "Nebula Graph Store", "text": "", "num_tokens": 808}, {"title": "Nebula Graph Store", "text": "", "num_tokens": 802}, {"title": "Nebula Graph Store", "text": "", "num_tokens": 826}, {"title": "Nebula Graph Store", "text": "", "num_tokens": 802}, {"title": "Nebula Graph Store", "text": "", "num_tokens": 801}, {"title": "Nebula Graph Store", "text": "", "num_tokens": 806}] +[{"title": "Knowledge Graph Index", "text": "", "num_tokens": 821}, {"title": "Knowledge Graph Index", "text": "", "num_tokens": 821}] +[{"title": "SQL Index Guide (Core)", "text": "", "num_tokens": 803}] +[{"title": "SQL Query Engine with LlamaIndex + DuckDB", "text": "", "num_tokens": 806}, {"title": "SQL Query Engine with LlamaIndex + DuckDB", "text": "", "num_tokens": 810}] +[{"title": "Document Summary Index", "text": "", "num_tokens": 1373}, {"title": "Document Summary Index", "text": "", "num_tokens": 1053}, {"title": "Document Summary Index", "text": "", "num_tokens": 839}] +[{"title": "Composable Graph Basic", "text": "", "num_tokens": 806}] +[{"title": "Composable Graph", "text": "", "num_tokens": 806}, {"title": "Composable Graph", "text": "", "num_tokens": 809}, {"title": "Composable Graph", "text": "", "num_tokens": 802}] +[{"title": "Composable Graph with Weaviate", "text": "", "num_tokens": 810}] +[{"title": "DeepLake + LlamaIndex", "text": "", "num_tokens": 804}, {"title": "DeepLake + LlamaIndex", "text": "", "num_tokens": 867}, {"title": "DeepLake + LlamaIndex", "text": "", "num_tokens": 845}, {"title": "DeepLake + LlamaIndex", "text": "", "num_tokens": 810}, {"title": "DeepLake + LlamaIndex", "text": "", "num_tokens": 803}, {"title": "DeepLake + LlamaIndex", "text": "", "num_tokens": 817}, {"title": "DeepLake + LlamaIndex", "text": "", "num_tokens": 814}, {"title": "DeepLake + LlamaIndex", "text": "", "num_tokens": 816}, {"title": "DeepLake + LlamaIndex", "text": "", "num_tokens": 816}, {"title": "DeepLake + LlamaIndex", "text": "", "num_tokens": 803}, {"title": "DeepLake + LlamaIndex", "text": "", "num_tokens": 818}, {"title": "DeepLake + LlamaIndex", "text": "", "num_tokens": 801}, {"title": "DeepLake + LlamaIndex", "text": "", "num_tokens": 807}, {"title": "DeepLake + LlamaIndex", "text": "", "num_tokens": 827}, {"title": "DeepLake + LlamaIndex", "text": "", "num_tokens": 803}, {"title": "DeepLake + LlamaIndex", "text": "", "num_tokens": 824}, {"title": "DeepLake + LlamaIndex", "text": "", "num_tokens": 804}, {"title": "DeepLake + LlamaIndex", "text": "", "num_tokens": 813}, 
{"title": "DeepLake + LlamaIndex", "text": "", "num_tokens": 803}] +[{"title": "Defining a Unified Query Interface over your Data", "text": "", "num_tokens": 815}, {"title": "Defining a Unified Query Interface over your Data", "text": "", "num_tokens": 803}, {"title": "Defining a Unified Query Interface over your Data", "text": "", "num_tokens": 815}, {"title": "Defining a Unified Query Interface over your Data", "text": "", "num_tokens": 814}, {"title": "Defining a Unified Query Interface over your Data", "text": "", "num_tokens": 833}] +[{"title": "Using LlamaIndex with Pinecone", "text": "", "num_tokens": 812}] +[{"title": "Test Complex Queries over Multiple Documents (with and without Query Decomposition)", "text": "", "num_tokens": 821}, {"title": "Test Complex Queries over Multiple Documents (with and without Query Decomposition)", "text": "", "num_tokens": 819}, {"title": "Test Complex Queries over Multiple Documents (with and without Query Decomposition)", "text": "", "num_tokens": 813}, {"title": "Test Complex Queries over Multiple Documents (with and without Query Decomposition)", "text": "", "num_tokens": 805}, {"title": "Test Complex Queries over Multiple Documents (with and without Query Decomposition)", "text": "", "num_tokens": 801}, {"title": "Test Complex Queries over Multiple Documents (with and without Query Decomposition)", "text": "", "num_tokens": 827}, {"title": "Test Complex Queries over Multiple Documents (with and without Query Decomposition)", "text": "", "num_tokens": 844}, {"title": "Test Complex Queries over Multiple Documents (with and without Query Decomposition)", "text": "", "num_tokens": 817}, {"title": "Test Complex Queries over Multiple Documents (with and without Query Decomposition)", "text": "", "num_tokens": 822}] +[{"title": "Test Complex Queries over Multiple Documents (text-davinci-003 vs. ChatGPT)", "text": "", "num_tokens": 817}, {"title": "Test Complex Queries over Multiple Documents (text-davinci-003 vs. ChatGPT)", "text": "", "num_tokens": 832}, {"title": "Test Complex Queries over Multiple Documents (text-davinci-003 vs. ChatGPT)", "text": "", "num_tokens": 827}, {"title": "Test Complex Queries over Multiple Documents (text-davinci-003 vs. ChatGPT)", "text": "", "num_tokens": 814}, {"title": "Test Complex Queries over Multiple Documents (text-davinci-003 vs. ChatGPT)", "text": "", "num_tokens": 816}, {"title": "Test Complex Queries over Multiple Documents (text-davinci-003 vs. ChatGPT)", "text": "", "num_tokens": 803}, {"title": "Test Complex Queries over Multiple Documents (text-davinci-003 vs. ChatGPT)", "text": "", "num_tokens": 801}, {"title": "Test Complex Queries over Multiple Documents (text-davinci-003 vs. ChatGPT)", "text": "", "num_tokens": 814}, {"title": "Test Complex Queries over Multiple Documents (text-davinci-003 vs. ChatGPT)", "text": "", "num_tokens": 809}, {"title": "Test Complex Queries over Multiple Documents (text-davinci-003 vs. ChatGPT)", "text": "", "num_tokens": 938}, {"title": "Test Complex Queries over Multiple Documents (text-davinci-003 vs. 
ChatGPT)", "text": "", "num_tokens": 849}] +[{"title": "Test Complex Queries over Multiple Documents (with and without Query Decomposition)", "text": "", "num_tokens": 825}, {"title": "Test Complex Queries over Multiple Documents (with and without Query Decomposition)", "text": "", "num_tokens": 805}, {"title": "Test Complex Queries over Multiple Documents (with and without Query Decomposition)", "text": "", "num_tokens": 805}, {"title": "Test Complex Queries over Multiple Documents (with and without Query Decomposition)", "text": "", "num_tokens": 969}, {"title": "Test Complex Queries over Multiple Documents (with and without Query Decomposition)", "text": "", "num_tokens": 936}] +[{"title": "Github Issue Analysis", "text": "", "num_tokens": 823}] +[{"title": "10Q Analysis", "text": "", "num_tokens": 819}, {"title": "10Q Analysis", "text": "", "num_tokens": 809}] +[{"title": "10K Analysis", "text": "", "num_tokens": 822}, {"title": "10K Analysis", "text": "", "num_tokens": 829}] +[{"title": "Agents", "text": "", "num_tokens": 809}] +[] +[] +[] +[] +[] +[{"title": "Q&A over Documents", "text": "", "num_tokens": 801}, {"title": "Q&A over Documents", "text": "", "num_tokens": 802}] +[{"title": "Basic Usage Pattern", "text": "", "num_tokens": 806}, {"title": "Basic Usage Pattern", "text": "", "num_tokens": 801}, {"title": "Basic Usage Pattern", "text": "", "num_tokens": 808}, {"title": "Basic Usage Pattern", "text": "", "num_tokens": 813}] +[{"title": "Finetuning", "text": "", "num_tokens": 809}] +[] +[] +[{"title": "One-Click Observability", "text": "", "num_tokens": 803}, {"title": "One-Click Observability", "text": "", "num_tokens": 802}] +[] +[{"title": "A Guide to Extracting Terms and Definitions", "text": "", "num_tokens": 811}, {"title": "A Guide to Extracting Terms and Definitions", "text": "", "num_tokens": 801}, {"title": "A Guide to Extracting Terms and Definitions", "text": "", "num_tokens": 810}, {"title": "A Guide to Extracting Terms and Definitions", "text": "", "num_tokens": 804}, {"title": "A Guide to Extracting Terms and Definitions", "text": "", "num_tokens": 815}, {"title": "A Guide to Extracting Terms and Definitions", "text": "", "num_tokens": 802}] +[{"title": "A Guide to Creating a Unified Query Framework over your Indexes", "text": "", "num_tokens": 804}, {"title": "A Guide to Creating a Unified Query Framework over your Indexes", "text": "", "num_tokens": 808}] +[] +[{"title": "A Guide to LlamaIndex + Structured Data", "text": "", "num_tokens": 804}] +[{"title": "Airbyte SQL Index Guide", "text": "", "num_tokens": 905}, {"title": "Airbyte SQL Index Guide", "text": "", "num_tokens": 805}, {"title": "Airbyte SQL Index Guide", "text": "", "num_tokens": 805}, {"title": "Airbyte SQL Index Guide", "text": "", "num_tokens": 816}] +[{"title": "\ud83d\udcac\ud83e\udd16 How to Build a Chatbot", "text": "", "num_tokens": 806}, {"title": "\ud83d\udcac\ud83e\udd16 How to Build a Chatbot", "text": "", "num_tokens": 822}, {"title": "\ud83d\udcac\ud83e\udd16 How to Build a Chatbot", "text": "", "num_tokens": 906}, {"title": "\ud83d\udcac\ud83e\udd16 How to Build a Chatbot", "text": "", "num_tokens": 830}, {"title": "\ud83d\udcac\ud83e\udd16 How to Build a Chatbot", "text": "", "num_tokens": 809}] +[{"title": "A Guide to Building a Full-Stack Web App with LLamaIndex", "text": "", "num_tokens": 807}, {"title": "A Guide to Building a Full-Stack Web App with LLamaIndex", "text": "", "num_tokens": 810}, {"title": "A Guide to Building a Full-Stack Web App with LLamaIndex", "text": "", 
"num_tokens": 811}, {"title": "A Guide to Building a Full-Stack Web App with LLamaIndex", "text": "", "num_tokens": 812}] +[{"title": "A Guide to Building a Full-Stack LlamaIndex Web App with Delphic", "text": "", "num_tokens": 803}, {"title": "A Guide to Building a Full-Stack LlamaIndex Web App with Delphic", "text": "", "num_tokens": 815}, {"title": "A Guide to Building a Full-Stack LlamaIndex Web App with Delphic", "text": "", "num_tokens": 804}, {"title": "A Guide to Building a Full-Stack LlamaIndex Web App with Delphic", "text": "", "num_tokens": 802}, {"title": "A Guide to Building a Full-Stack LlamaIndex Web App with Delphic", "text": "", "num_tokens": 801}, {"title": "A Guide to Building a Full-Stack LlamaIndex Web App with Delphic", "text": "", "num_tokens": 801}, {"title": "A Guide to Building a Full-Stack LlamaIndex Web App with Delphic", "text": "", "num_tokens": 807}, {"title": "A Guide to Building a Full-Stack LlamaIndex Web App with Delphic", "text": "", "num_tokens": 806}] +[] +[] +[{"title": "Building Performant RAG Applications for Production", "text": "", "num_tokens": 801}] +[] +[] +[{"title": "The Development Pathway", "text": "", "num_tokens": 808}] +[] +[] +[] +[] +[{"title": "Usage Pattern", "text": "", "num_tokens": 806}] +[] +[] +[{"title": "ServiceContext", "text": "", "num_tokens": 802}] +[] +[] +[] +[] +[{"title": "Cost Analysis", "text": "", "num_tokens": 804}] +[] +[{"title": "Usage Pattern (Response Evaluation)", "text": "", "num_tokens": 807}] +[] +[] +[] +[{"title": "Modules", "text": "", "num_tokens": 810}, {"title": "Modules", "text": "", "num_tokens": 822}] +[] +[{"title": "Usage Pattern", "text": "", "num_tokens": 813}] +[] +[] +[] +[] +[] +[] +[] +[{"title": "Output Parsing", "text": "", "num_tokens": 807}] +[] +[] +[{"title": "Usage Pattern", "text": "", "num_tokens": 813}] +[] +[] +[] +[] +[] +[{"title": "Response Modes", "text": "", "num_tokens": 803}] +[] +[] +[{"title": "Query Transformations", "text": "", "num_tokens": 813}] +[{"title": "Usage Pattern", "text": "", "num_tokens": 812}] +[] +[] +[{"title": "Prompts", "text": "", "num_tokens": 804}] +[{"title": "Usage Pattern", "text": "", "num_tokens": 807}] +[] +[] +[] +[{"title": "Customizing LLMs within LlamaIndex Abstractions", "text": "", "num_tokens": 802}, {"title": "Customizing LLMs within LlamaIndex Abstractions", "text": "", "num_tokens": 805}] +[] +[{"title": "LLM", "text": "", "num_tokens": 801}, {"title": "LLM", "text": "", "num_tokens": 802}, {"title": "LLM", "text": "", "num_tokens": 801}] +[] +[] +[{"title": "Defining and Customizing Documents", "text": "", "num_tokens": 805}] +[] +[{"title": "Customizing Storage", "text": "", "num_tokens": 806}] +[] +[{"title": "Vector Stores", "text": "", "num_tokens": 802}, {"title": "Vector Stores", "text": "", "num_tokens": 803}] +[] +[] +[{"title": "Document Stores", "text": "", "num_tokens": 802}] +[] +[{"title": "Usage Pattern", "text": "", "num_tokens": 804}] +[] +[] +[] +[] +[] +[{"title": "Vector Store Index", "text": "", "num_tokens": 802}] +[] +[] +[{"title": "Document Management", "text": "", "num_tokens": 804}] +[{"title": "Show tqdm progress bars for all primrary index creation operations", "text": "", "num_tokens": 809}, {"title": "Show tqdm progress bars for all primrary index creation operations", "text": "", "num_tokens": 803}] +[{"title": "Composability", "text": "", "num_tokens": 807}] +[] +[] +[{"title": "App Showcase", "text": "", "num_tokens": 803}] +[] +[{"title": "Unit Testing LLMs With DeepEval", "text": "", 
"num_tokens": 808}] +[] +[] +[{"title": "Guidance", "text": "", "num_tokens": 801}] +[{"title": "Using Vector Stores", "text": "", "num_tokens": 804}, {"title": "Using Vector Stores", "text": "", "num_tokens": 805}, {"title": "Using Vector Stores", "text": "", "num_tokens": 808}, {"title": "Using Vector Stores", "text": "", "num_tokens": 806}, {"title": "Using Vector Stores", "text": "", "num_tokens": 801}] +[] +[] +[] +[{"title": "ChatGPT Plugin Integrations", "text": "", "num_tokens": 808}] +[{"title": "Customization Tutorial", "text": "", "num_tokens": 807}] +[{"title": "High-Level Concepts", "text": "", "num_tokens": 802}] +[] +[] +[]