
In the code below I try to use three inputs in the prompt template, namely `context`, `chat_history` and `question`.

# Sources.
# Memory: <https://python.langchain.com/docs/modules/memory/>.

# Imports.
from langchain.chains import LLMChain
from langchain.llms import OpenAI
from langchain.memory import ConversationBufferMemory
from langchain.prompts import PromptTemplate
import json
import os

# Input from external file.
openai_api_key = json.load(open('input.json'))['openai_api_key']

# Load environment.
os.environ["OPENAI_API_KEY"] = openai_api_key

# Context.
context = """
The secret code is 1234.
"""


# ...
llm = OpenAI(temperature=0)

# Notice that "chat_history" is present in the prompt template.
template = """You are a nice chatbot having a conversation with a human. In answering questions, you take the context 
below into account.

Context: {context}

Previous conversation:
{chat_history}

New human question: {question}
Response:"""
prompt = PromptTemplate.from_template(template)

# Notice that we need to align the `memory_key`.
memory = ConversationBufferMemory(memory_key="chat_history")
conversation = LLMChain(
    llm=llm,
    prompt=prompt,
    verbose=True,
    memory=memory
)

# Conversation.
while True:
    question = input("User: ")
    if question in ['q', 'quit', 'exit', 'stop', 'abort']:
        break
    answer = conversation(inputs={"question": question, "context": context})
    print(answer)
    print("=" * 100)

This, however, gives me the error `ValueError: One input key expected got ['question', 'context']`.

What am I doing wrong?
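My guess is that the memory cannot tell which of the two chain inputs (`question` or `context`) it should record in the history. Below is a minimal sketch of the change I think might help, assuming `ConversationBufferMemory` accepts an `input_key` argument to point it at the user message (I have not verified this against my LangChain version):

# Possible fix (unverified): tell the memory which input holds the user's message,
# so it does not have to guess between "question" and "context".
memory = ConversationBufferMemory(memory_key="chat_history", input_key="question")
conversation = LLMChain(
    llm=llm,
    prompt=prompt,
    verbose=True,
    memory=memory
)

Is that the right direction, or is the problem elsewhere?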

Adriaan
