
Here is the code:

import os

import constants
import chromadb
import PySimpleGUI as sg

from langchain.chat_models import ChatOpenAI
from langchain.chains import ConversationalRetrievalChain
from langchain.vectorstores import Chroma
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.memory import ConversationBufferMemory

#Set up environment and embeddings
os.environ["OPENAI_API_KEY"] = constants.APIKEY
embeddings = OpenAIEmbeddings(openai_api_key=constants.APIKEY)

#Find stored client and create instance for usage
find_client = chromadb.PersistentClient(constants.persist_directory)
openai_lc_client = Chroma(persist_directory=constants.persist_directory, embedding_function=embeddings, client=find_client)

#Create memory for the QA chain and choose the model
memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
model = ChatOpenAI(temperature=0, model='gpt-4')

#Create QA chain
qa = ConversationalRetrievalChain.from_llm(llm=model, retriever=openai_lc_client.as_retriever(), chain_type='stuff', memory=memory)

#Set up variables for GUI
my_img = sg.Image(filename='./logo.png', key='_LOGO_')

iolist = []
layout = [
        [sg.Column([[my_img]], justification='center')],
        [sg.Text('Where can we take you?                                                                                                                                           Powered by GPT-4')],
        [sg.InputText(size=(100, 5), expand_y=True, do_not_clear=False)],
        [sg.Multiline('', size=(100, 20), background_color='Slategray3',key='-SAYINGTHINGS-', write_only=True, no_scrollbar=True)],
        [sg.Column([[sg.Submit(size=(20, 4)), sg.CloseButton('Close', size=(20, 4))]], justification='center')]
        ]

#Initialize GUI
window = sg.Window('customGPT', layout, finalize=True)

#Loop to show GUI
while True:
        event, things = window.read()

        #Stop if the window was closed or the Close button was pressed,
        #before touching the values dict (it is None when the window is closed)
        if event in (sg.WIN_CLOSED, 'Close'):
            break

        user_input = things[0]
        query = f"###Prompt {user_input}"

        llm_response = qa({'question': query})
        output = llm_response['answer']

        iolist.append(f'Question:\n{user_input}\n')
        iolist.append(f'Answer:\n{output}\n')
        window['-SAYINGTHINGS-'].update('\n'.join(iolist))

#Close out of directory and GUI
window.close()
openai_lc_client.delete_collection()
openai_lc_client.persist()

That runs the actual chat model. Before I run it, I run this docloader code:

import os
import constants
import chromadb

from langchain.embeddings import OpenAIEmbeddings
from langchain.document_loaders import DirectoryLoader, PDFMinerLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import Chroma

#Set up environment and embeddings
os.environ["OPENAI_API_KEY"] = constants.APIKEY
embeddings = OpenAIEmbeddings(openai_api_key=constants.APIKEY)

#Function to load documents
def load():
    loader = DirectoryLoader(constants.file_directory, glob='*.pdf', loader_cls=PDFMinerLoader)
    data = loader.load()

    text_splitter = RecursiveCharacterTextSplitter(chunk_size=512, chunk_overlap=10)
    docs = text_splitter.split_documents(data)

    return docs

#Create persistent directory in desired location
new_client = chromadb.PersistentClient(constants.persist_directory)
openai_lc_client = Chroma.from_documents(
        load(), embeddings, client=new_client, collection_name="openai_collection", persist_directory=constants.persist_directory
    )

The docloader correctly loads the documents into an SQLite database, so I don't think the issue is with that step. But when I try to use the database in the main script, it just behaves like a plain chat model with no context.
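
For reference, the persisted store can also be inspected directly to confirm the embeddings are really there (a minimal check, assuming the chromadb 0.4-style PersistentClient API and the same constants.persist_directory):

import chromadb
import constants

client = chromadb.PersistentClient(constants.persist_directory)

#List every collection in the persisted store and how many embeddings
#each one holds; the docloader writes to "openai_collection"
for collection in client.list_collections():
    print(collection.name, collection.count())

Whatever collection name and count show up here is what the Chroma instance in the main script has to be pointed at.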

Up until this week it was working. I'm assuming the breakage is due to updates to langchain and openai, but I have no idea why it just stopped using the loaded context.
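
The exact package versions that ended up installed can be checked with something like this (just a sanity check, assuming Python 3.8+ for importlib.metadata):

from importlib.metadata import version

#Print the installed version of each package the two scripts depend on
for pkg in ("langchain", "openai", "chromadb", "PySimpleGUI"):
    print(pkg, version(pkg))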

  • Added context: I tried reinstalling all modules to make sure they were up to date. When I ask the GPT instance what version it's running, it says it is based on GPT-3 despite me setting it to GPT-4. – Nicholas Mounts Aug 23 '23 at 20:56
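
The model's self-description in a chat reply isn't a reliable version check; the configuration the chain is actually using can be printed directly (a minimal check, assuming langchain's ChatOpenAI exposes model_name, as it does in the 0.0.x releases):

#Print the model name the ChatOpenAI instance was configured with,
#rather than relying on the model's self-description
print(model.model_name)

#Confirm the chain is wired to the Chroma-backed retriever
print(type(qa.retriever))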
