from langchain_core.messages import HumanMessage, SystemMessage
from langchain_openai import ChatOpenAI


def main():
    # Configure the chat model; the OpenAI API key is read from the
    # OPENAI_API_KEY environment variable.
    llm = ChatOpenAI(
        model="gpt-3.5-turbo",
        temperature=0.1,          # low temperature -> more deterministic output
        max_tokens=100,           # cap the length of the completion
        top_p=0.9,                # nucleus-sampling cutoff
        frequency_penalty=0.5,    # discourage repeated tokens
        presence_penalty=0.5,     # discourage reusing tokens already present
        stop_sequences=["\n", "Human:", "AI:"],  # stop generation at these strings
        seed=100,                 # best-effort reproducibility across runs
    )

    # A minimal conversation: a system instruction plus one user question.
    messages = [
        SystemMessage(content="You are a helpful assistant!"),
        HumanMessage(content="What is the capital of France?"),
    ]

    # invoke() returns an AIMessage; .content holds the generated text.
    result = llm.invoke(messages).content
    print("LLM output:\n", result)


if __name__ == "__main__":
    main()
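

# Optional sketch (not part of the original script): the same model and
# messages can be streamed token-by-token instead of invoked in one shot.
# stream() is the standard LangChain chat-model method and yields
# AIMessageChunk objects whose .content holds the incremental text.
def stream_example():
    llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0.1)
    messages = [
        SystemMessage(content="You are a helpful assistant!"),
        HumanMessage(content="What is the capital of France?"),
    ]
    for chunk in llm.stream(messages):
        print(chunk.content, end="", flush=True)
    print()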