Nova模型支持使用Langchain来调用
先更新langchain包:
%pip install -q langchain langchain_community faiss-cpu pypdf --upgrade
from langchain_aws import ChatBedrockConverse
from langchain_core.messages import HumanMessage

# Chat client backed by the Bedrock Converse API. Use `model=` (the alias
# of `model_id`) for consistency with the other examples in this file.
llm = ChatBedrockConverse(
    model="us.amazon.nova-lite-v1:0",
    temperature=0.7,
)

# invoke() accepts a list of (role, content) tuples directly.
messages = [
    ("system", "Provide three alternative song titles for a given user title"),
    ("user", "Teardrops on My Guitar"),
]

response = llm.invoke(messages)
print(f"Request ID: {response.id}")
response.pretty_print()

# Here we can pass the chat history (including the model's previous reply)
# back to the model to ask follow up questions.
multi_turn_messages = [
    *messages,
    response,
    HumanMessage(content="Select your favorite and tell me why"),
]

response = llm.invoke(multi_turn_messages)
print(f"\n\nRequest ID: {response.id}")
response.pretty_print()
可以将各种媒体类型传递给模型:
from langchain_aws import ChatBedrockConverse
from langchain_core.messages import HumanMessage

image_path = "sunset.png"

# Multimodal chat client for the Nova Lite model.
llm = ChatBedrockConverse(
    model="us.amazon.nova-lite-v1:0",
    temperature=0.7,
)

# Load the raw PNG bytes from disk.
with open(image_path, "rb") as image_file:
    binary_data = image_file.read()

# Bedrock Converse content blocks: one image part, then a text instruction.
image_block = {"image": {"format": "png", "source": {"bytes": binary_data}}}
text_block = {"text": "Provide a summary of this photo"}
message = HumanMessage(content=[image_block, text_block])

response = llm.invoke([message])
print(f"\n\nRequest ID: {response.id}")
response.pretty_print()
from langchain_aws import ChatBedrockConverse
from langchain_core.messages import HumanMessage

video_path = "the-sea.mp4"

# Multimodal chat client for the Nova Lite model.
llm = ChatBedrockConverse(
    model="us.amazon.nova-lite-v1:0",
    temperature=0.7,
)

# Read the MP4 file into memory as raw bytes.
with open(video_path, "rb") as video_file:
    binary_data = video_file.read()

# One video content block followed by the text instruction.
content_blocks = [
    {"video": {"format": "mp4", "source": {"bytes": binary_data}}},
    {"type": "text", "text": "描述以下视频"},
]
message = HumanMessage(content=content_blocks)

response = llm.invoke([message])
print(f"\n\nRequest ID: {response.id}")
response.pretty_print()
from langchain_aws import ChatBedrockConverse
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_core.output_parsers import StrOutputParser

llm = ChatBedrockConverse(
    model="us.amazon.nova-lite-v1:0",
    temperature=0.7,
)

# Pipe model output through StrOutputParser so stream() yields plain strings.
chain = llm | StrOutputParser()

messages = [
    SystemMessage(content="You are an author with experience writing creative novels"),
    HumanMessage(
        # Fixed typo in the prompt: "outlin" -> "outline".
        content="Write an outline for a novel about a wizard named Theodore graduating from college"
    ),
]

# Stream the generated text chunk-by-chunk to stdout.
for chunk in chain.stream(messages):
    print(chunk, end="")