# Global settings

```python
from llama_index.core import Settings
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader
from portkey_ai.llms.llama_index import PortkeyLLM

# Route all LlamaIndex LLM calls through Portkey by setting it as the global default LLM.
portkey = PortkeyLLM(virtual_key=openai_virtual_key)  # assumes openai_virtual_key holds your Portkey virtual key for OpenAI
Settings.llm = portkey

# Load local documents and build a vector index over them.
documents = SimpleDirectoryReader("data").load_data()
index = VectorStoreIndex.from_documents(documents)

# Query the index; passing llm=portkey also overrides the LLM for this engine explicitly.
query_engine = index.as_query_engine(llm=portkey)
response = query_engine.query("What did the author do growing up?")
print(response)
```
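If rebuilding the index (and re-embedding everything in `data/`) on every run is unnecessary, the index can be persisted to disk and reloaded with LlamaIndex's storage utilities. The sketch below is illustrative: the `./storage` directory name is an arbitrary choice, and it assumes the `portkey` LLM object from the snippet above is already defined (with `Settings.llm` set, the explicit `llm=portkey` argument is optional).

```python
from llama_index.core import (
    StorageContext,
    VectorStoreIndex,
    SimpleDirectoryReader,
    load_index_from_storage,
)

# First run: build the index once and persist it to a local directory.
documents = SimpleDirectoryReader("data").load_data()
index = VectorStoreIndex.from_documents(documents)
index.storage_context.persist(persist_dir="./storage")

# Later runs: reload the persisted index instead of re-embedding the documents.
storage_context = StorageContext.from_defaults(persist_dir="./storage")
index = load_index_from_storage(storage_context)

# Query as before; the Portkey-backed LLM is used for response synthesis.
query_engine = index.as_query_engine(llm=portkey)
print(query_engine.query("What did the author do growing up?"))
```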