# llm_integration.py
#
# Wires up the Haystack OpenAI generator and prompt builder from values
# declared in the project-local config.py (model name, prompt template,
# API key, optional custom base URL).
from haystack.components.generators.openai import OpenAIGenerator
from haystack.components.builders import PromptBuilder
from haystack.utils import Secret

# Import the renamed configuration variables from config.py
from config import (
    OPENAI_LLM_MODEL,
    DEFAULT_PROMPT_TEMPLATE,
    OPENAI_API_KEY_FROM_CONFIG,   # API key taken from config, not the environment
    OPENAI_API_BASE_URL_CONFIG,   # optional custom base URL from config
)


def initialize_llm_and_prompt_builder() -> tuple[OpenAIGenerator, PromptBuilder]:
    """Initialize and return the OpenAI generator and the prompt builder.

    Reads the API key and optional base URL directly from ``config.py``.

    Returns:
        tuple[OpenAIGenerator, PromptBuilder]: the configured LLM generator
        and the prompt builder built from ``DEFAULT_PROMPT_TEMPLATE``.
    """
    # Detect a missing/placeholder key; only warn so callers still get the
    # components back (the failure then surfaces on the first API call).
    # TODO(review): consider raising ValueError here instead of warning.
    if not OPENAI_API_KEY_FROM_CONFIG or "YOUR_API_KEY" in OPENAI_API_KEY_FROM_CONFIG:
        print("警告: OpenAI API Key 未在 config.py 中有效配置。")

    print(f"Initializing OpenAI Generator with model: {OPENAI_LLM_MODEL}")
    if OPENAI_API_BASE_URL_CONFIG:
        print(f"Using custom API base URL from config: {OPENAI_API_BASE_URL_CONFIG}")
    else:
        print("Using default OpenAI API base URL (None specified in config).")

    # Use the key and base_url imported from config.py directly; wrapping the
    # key in Secret.from_token keeps it out of component serialization output.
    llm_generator = OpenAIGenerator(
        api_key=Secret.from_token(OPENAI_API_KEY_FROM_CONFIG),
        model=OPENAI_LLM_MODEL,
        api_base_url=OPENAI_API_BASE_URL_CONFIG,
    )
    print("OpenAI Generator initialized.")

    print("Initializing Prompt Builder...")
    prompt_builder = PromptBuilder(template=DEFAULT_PROMPT_TEMPLATE)
    print("Prompt Builder initialized.")

    return llm_generator, prompt_builder


# Example usage / smoke test: build a prompt from sample documents and
# run it through the LLM once.
if __name__ == "__main__":
    from haystack import Document

    llm, builder = initialize_llm_and_prompt_builder()

    sample_question = "Haystack 是什么?"
    sample_docs = [
        Document(content="Haystack 是一个用于构建 NLP 应用程序的开源框架。"),
        Document(content="你可以使用 Haystack 连接不同的组件。"),
    ]

    prompt_builder_output = builder.run(question=sample_question, documents=sample_docs)
    prompt = prompt_builder_output["prompt"]
    print("\n--- Generated Prompt ---")
    print(prompt)

    print("\n--- Running OpenAI LLM ---")
    try:
        # Note: OpenAIGenerator expects 'prompt' as input key by default
        llm_output = llm.run(prompt=prompt)
        print("LLM Output:", llm_output)
    except Exception as e:
        # Boundary handler: report the API failure instead of crashing the demo.
        print(f"Error during OpenAI API call: {e}")