from langchain_openai import ChatOpenAI
from langchain.prompts import (
    ChatPromptTemplate,
    MessagesPlaceholder,
    SystemMessagePromptTemplate,
    HumanMessagePromptTemplate,
)
from langchain.chains import LLMChain
from langchain.memory import ConversationBufferMemory
# GLM-4 exposes an OpenAI-compatible endpoint, so ChatOpenAI can talk to it
# by pointing openai_api_base at the ZhipuAI service.
llm = ChatOpenAI(
    temperature=0.95,
    model="glm-4",
    openai_api_key="",  # your API key
    openai_api_base="https://open.bigmodel.cn/api/paas/v4/",
)

# The prompt reserves a MessagesPlaceholder so the conversation history kept
# by the memory is injected into every call.
prompt = ChatPromptTemplate(
    messages=[
        SystemMessagePromptTemplate.from_template(
            "You are a nice chatbot having a conversation with a human."
        ),
        MessagesPlaceholder(variable_name="chat_history"),
        HumanMessagePromptTemplate.from_template("{question}"),
    ]
)
# ConversationBufferMemory stores every turn under "chat_history", matching
# the MessagesPlaceholder variable name used in the prompt above.
memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)

conversation = LLMChain(
    llm=llm,
    prompt=prompt,
    verbose=True,
    memory=memory,
)

conversation.invoke({"question": "tell me a joke"})
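
# (Illustrative follow-up, not part of the original snippet.) Because the
# memory records each turn under "chat_history", a second invoke can refer
# back to the first: the model now sees the joke it just told via the
# MessagesPlaceholder. The question text here is only an example.
conversation.invoke({"question": "Explain why that joke is funny."})

# Inspect what the memory has accumulated so far, if needed.
print(memory.load_memory_variables({})["chat_history"])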