03. Model Serialization - Saving and Loading
Serialization

A chain can only be saved and restored if every component in it supports LangChain's serialization format; the is_lc_serializable() method used below checks exactly that.
# Configuration file for managing the API KEY as an environment variable
from dotenv import load_dotenv

# Load the API KEY information
load_dotenv()

True
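load_dotenv() reads variables from a .env file in the working directory. As a quick sanity check (not part of the original, and assuming the file defines OPENAI_API_KEY), you can confirm the key was picked up:

import os

# After load_dotenv(), the key should be present as an environment variable.
print("OPENAI_API_KEY" in os.environ)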
# Set up LangSmith tracking. https://smith.langchain.com
# !pip install langchain-teddynote
from langchain_teddynote import logging

# Enter a project name.
logging.langsmith("CH04-Models")

Start tracking LangSmith.
[Project name]
CH04-Models
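If the langchain_teddynote helper is not installed, the same tracing can be enabled through LangSmith's standard environment variables. A minimal sketch, roughly equivalent in effect, assuming LANGCHAIN_API_KEY is already loaded from the .env file:

import os

# Manual LangSmith setup instead of logging.langsmith(...)
os.environ["LANGCHAIN_TRACING_V2"] = "true"
os.environ["LANGCHAIN_PROJECT"] = "CH04-Models"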
import os

from langchain_openai import ChatOpenAI
from langchain.prompts import PromptTemplate

# Create a question using a prompt template.
prompt = PromptTemplate.from_template("What is the color of {fruit}?")
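As a quick illustration (this call is not in the original), rendering the template shows how the fruit variable is substituted:

# Fill in the template variable with a sample value.
print(prompt.format(fruit="apple"))

What is the color of apple?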
print(f"ChatOpenAI: {ChatOpenAI.is_lc_serializable()}")ChatOpenAI: Truellm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0)
# Check if serialization is possible.
print(f"ChatOpenAI: {llm.is_lc_serializable()}")ChatOpenAI: True# Create a chain.
chain = prompt | llm
# Check if serialization is possible.
chain.is_lc_serializable()
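With the serializability checks passing, the chain can be saved to disk and loaded back, which is what the "Saving and Loading" in the title refers to. Below is a minimal sketch, assuming the dumpd/load helpers from langchain_core.load and a hypothetical file name fruit_chain.pkl; the OPENAI_API_KEY is expected to be available in the environment when the chain is revived:

import pickle

from langchain_core.load import dumpd, load

# Serialize the chain into a plain dict (secrets such as the API key are not stored).
dumped_chain = dumpd(chain)

# Save the serialized chain to disk (pickle is one option; JSON would also work).
with open("fruit_chain.pkl", "wb") as f:
    pickle.dump(dumped_chain, f)

# Read the dict back and revive it into a runnable chain.
with open("fruit_chain.pkl", "rb") as f:
    loaded_dict = pickle.load(f)

restored_chain = load(loaded_dict)
print(restored_chain.invoke({"fruit": "apple"}))

If a text format is preferred over pickle, dumps(chain) from the same module produces the equivalent representation as a JSON string.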