1

`I tried saving the conversation history in session_attributes['sessionContext']. It worked — in the generated logs I can see the conversation history in session_attributes['sessionContext'] — but it does not appear in {history}: only the current message from Lex appears there. I want to pass session_attributes as the history in the prompt.

import all necessary packages
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)

def close(session_attributes, active_contexts, fulfillment_state, intent, message):
    
    response = {
        'sessionState': {
            'activeContexts':[{
                'name': 'intentContext',
                'contextAttributes': active_contexts,
                'timeToLive': {
                    'timeToLiveInSeconds': 600,
                    'turnsToLive': 1
                }
            }],
            'sessionAttributes': session_attributes,
            'dialogAction': {
                'type': 'Close',
            },
            'intent': intent,
        },
        'messages': [{'contentType': 'PlainText', 'content': message}]
    }

    return response
    
def delegate(session_attributes, active_contexts, intent, message):
    return {
        'sessionState': {
            'activeContexts':[{
                'name': 'intentContext',
                'contextAttributes': active_contexts,
                'timeToLive': {
                    'timeToLiveInSeconds': 600,
                    'turnsToLive': 1
                }
            }],
            'sessionAttributes': session_attributes,
            'dialogAction': {
                'type': 'Delegate',
            },
            'intent': intent,
        },
        'messages': [{'contentType': 'PlainText', 'content': message}]
    }


def initial_message(intent_name):
    response = {
            'sessionState': {
                'dialogAction': {
                    'type': 'ElicitSlot',
                    'slotToElicit': 'Location' if intent_name=='BookHotel' else 'PickUpCity'
                },
                'intent': {
                    'confirmationState': 'None',
                    'name': intent_name,
                    'state': 'InProgress'
                }
            }
    }
    
    return response

# --- Helper Functions ---

def try_ex(value):
    """
    Call passed in function in try block. If KeyError is encountered return None.
    This function is intended to be used to safely access dictionary of the Slots section in the payloads.
    Note that this function would have negative impact on performance.
    """

    if value is not None:
        return value['value']['interpretedValue']
    else:
        return None

def invoke_llm(query, session_history):
    
    endpoint_name = 'endpoint-name'
    region = 'us-east-1'
    kendra_index_id = 'kendra-index-id'
    print("invoke LLM session__history:: ", session_history )
    class ContentHandler(ContentHandlerBase):
        content_type = "application/json"
        accepts = "application/json"
    
        def transform_input(self, prompt: str, model_kwargs: dict) -> bytes:
            input_str = json.dumps({"text_inputs": prompt, **model_kwargs})
            return input_str.encode('utf-8')
        def transform_output(self, output: bytes) -> str:
            response_json = json.loads(output.read().decode("utf-8"))
            return response_json["generated_texts"][0]
    
    content_handler = ContentHandler()
    llm=SagemakerEndpoint(
            endpoint_name=endpoint_name, 
            region_name=region, 
            model_kwargs={"temperature":1e-10, "max_length": 500},
            content_handler=content_handler
        )
    
    retriever = KendraIndexRetriever(kendraindex=kendra_index_id, 
        awsregion=region, 
        return_source_documents=True)
    
    
    
    
    template = """
    Use the following context (delimited by <ctx></ctx>) and the chat history (delimited by <hs></hs>) to answer the question:
    ------
    <ctx>
    {context}
    </ctx>
    ------
    <hs>
    {history}
    </hs>
    ------
    {question}
    Answer:
    """
    prompt = PromptTemplate(
        input_variables=["history", "context", "question"],
        template=template
    )

    qa = RetrievalQA.from_chain_type(
        llm=llm,
        chain_type='stuff',
        retriever=retriever,
        verbose=True,
        
        chain_type_kwargs={
            "verbose": True,
            "prompt": prompt,
            "memory": ConversationBufferMemory(
                memory_key="history",
                input_key="question",
                return_messages=True),
        }
    )
    # chat_history = []
    # while True:
    result = qa({'query':query, 'history': session_history})
    # result = qa({'query':query, 'history': lex_conv_history})
    
    response = result['result']
    
    response = qa(query)
    print("Answer: ", response['result'])

    return response


def lambda_handler(intent_request, context):
    
    print("input received: ", intent_request)
    
    logger.debug(intent_request)
    
    intent = intent_request['sessionState']['intent']
    
    
    
    session_attributes = intent_request['sessionState']['sessionAttributes']
    
    #print ("Session attributes -----",session_attributes)
    if 'sessionContext' not in session_attributes.keys():
        print("First Execution")
        session_attributes['sessionContext'] = ''
    
    active_contexts = {}
    if intent['name']=='FallbackIntent':
        query = intent_request['inputTranscript']+session_attributes['sessionContext']
        response = invoke_llm(query, session_attributes['sessionContext'])
        
        response_json = json.dumps({
            'Answer': response['result'],
        })
        
        active_contexts['Query'] = response_json
        logger.debug('Answer from LLM={}'.format(response_json))
        intent['confirmationState']="Confirmed"
        intent['state']="Fulfilled"
        session_attributes['sessionContext'] = session_attributes['sessionContext'] + ' ' + intent_request['inputTranscript']  + ' ' + response['result']
        print ("History - - - ",session_attributes['sessionContext'])
        return close(session_attributes, active_contexts, 'Fulfilled', intent, response['result'])
    
    #confirmation_status = intent_request['sessionState']['intent']['confirmationState']
    query = try_ex(intent_request['sessionState']['intent']['slots']['Query'])
    print("Question by user: ", query)
    
    if query or intent['name']=='FallbackIntent':
        response = invoke_llm(query, session_attributes['sessionContext'])
        # response = invoke_llm(query)
        
        response_json = json.dumps({
            'Answer': response['result'],
        })
        
        logger.debug('Answer from LLM={}'.format(response_json))
        intent['confirmationState']="Confirmed"
        intent['state']="Fulfilled"
        session_attributes['sessionContext'] = session_attributes['sessionContext'] + ' ' + intent_request['inputTranscript']  + ' ' + response['result']
        print ("History - - - ",session_attributes['sessionContext'])
        return close(session_attributes, active_contexts, 'Fulfilled', intent, response['result'])

1 Answer

1

From your session variable you need to iterate through the messages and add them to memory.

memory = ConversationBufferMemory()
memory.chat_memory.add_user_message("hi!")
memory.chat_memory.add_ai_message("whats up?") 

Now load the memory variables

memory.load_memory_variables({})

You should be able to see history now

{'history': 'Human: hi!\nAI: whats up?'}
ZKS
  • 817
  • 3
  • 16
  • 31