# Install the required packages (Colab). langchain, openai, and gradio are
# added here because later cells import them.
!pip install langchain langchain_community
!pip install openai
!pip install pypdf
!pip install tiktoken
!pip install faiss-cpu
!pip install gradio
from langchain_community.document_loaders import PyPDFLoader

api_key = "sk-proj-YOUR_API_KEY"  # replace with your own OpenAI API key

# Load the PDF; PyPDFLoader returns one Document per page.
pdf_filepath = '/content/차세대 한국형 스마트팜 개발.pdf'
loader = PyPDFLoader(pdf_filepath)
data = loader.load()
from langchain.text_splitter import CharacterTextSplitter

# Split the pages into ~1000-character chunks with a 200-character overlap
# so context is not lost at chunk boundaries.
text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
data = text_splitter.split_documents(data)
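# A quick sanity check before embedding (a minimal sketch, not in the original
# post; the exact counts depend on the PDF): how many chunks were produced,
# and what one looks like.
print(f"chunks: {len(data)}")
print(data[0].page_content[:200])  # first 200 characters of the first chunk
print(data[0].metadata)            # source file and page number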
# OpenAIEmbeddings now lives in langchain_community (the old
# langchain.embeddings.openai path is deprecated).
from langchain_community.embeddings import OpenAIEmbeddings

embeddings = OpenAIEmbeddings(model="text-embedding-ada-002", openai_api_key=api_key)
from langchain_community.vectorstores import FAISS

# Embed every chunk and build an in-memory FAISS index over the vectors.
vectorstore = FAISS.from_documents(data, embeddings)
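# (Optional sketch, not in the original post.) The FAISS index can be saved to
# disk and reloaded later, so the PDF does not have to be re-embedded on every
# run. The directory name "faiss_index" is arbitrary.
vectorstore.save_local("faiss_index")
# Recent LangChain versions require allow_dangerous_deserialization=True here
# because the index metadata is stored with pickle.
vectorstore = FAISS.load_local("faiss_index", embeddings,
                               allow_dangerous_deserialization=True)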
from langchain_community.chat_models import ChatOpenAI

llm = ChatOpenAI(model="gpt-3.5-turbo", openai_api_key=api_key)
from langchain.memory import ConversationBufferMemory

# Buffer memory keeps the running chat history so follow-up questions
# can refer back to earlier turns.
memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)
from langchain.chains import ConversationalRetrievalChain

# Tie the LLM, the FAISS retriever, and the memory together; "stuff" simply
# stuffs the retrieved chunks into the prompt.
conversation_chain = ConversationalRetrievalChain.from_llm(
    llm=llm,
    chain_type="stuff",
    retriever=vectorstore.as_retriever(),
    memory=memory
)
query = "한국형 스마트팜이란?"
result = conversation_chain({"question": query})
answer = result["answer"]
answer
query = "스마트팜 농가의 통합관리 및 유지보수가 어려운 이유는?"
result = conversation_chain({"question": query})
answer = result["answer"]
answer
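# (Optional sketch, not in the original post.) To see which PDF chunks an
# answer was grounded in, rebuild the chain with return_source_documents=True.
# Because the chain then returns multiple keys, the memory needs
# output_key="answer" to know which one to store.
memory = ConversationBufferMemory(memory_key='chat_history',
                                  return_messages=True,
                                  output_key='answer')
conversation_chain = ConversationalRetrievalChain.from_llm(
    llm=llm,
    chain_type="stuff",
    retriever=vectorstore.as_retriever(),
    memory=memory,
    return_source_documents=True,
)
result = conversation_chain.invoke({"question": "한국형 스마트팜이란?"})
for doc in result["source_documents"]:
    print(doc.metadata, doc.page_content[:100])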
import gradio as gr
from langchain_community.chat_models import ChatOpenAI
from langchain.prompts import PromptTemplate
import os

# Set the OpenAI API key (via an environment variable or directly).
os.environ["OPENAI_API_KEY"] = api_key  # enter your OpenAI API key here
# Prompt template setup
prompt = PromptTemplate(
    input_variables=["chat_history", "question"],
    template="""You are an AI assistant. You are currently having a conversation with a human. Answer the questions.

chat_history: {chat_history}
Human: {question}
AI:"""
)
# LLM setup
llm = ChatOpenAI(
    temperature=0,  # deterministic responses (no sampling randomness)
    model_name='gpt-4o-mini-2024-07-18'  # model name
)
# Newer memory handling: build the history string by hand instead of using
# the removed ConversationBufferWindowMemory.
def chat_with_ai(user_input, history):
    """Take the user input plus Gradio's message history and return the AI response."""
    chat_history = "\n".join(
        f"Human: {h['content']}" if h['role'] == 'user' else f"AI: {h['content']}"
        for h in history
    )
    response = (prompt | llm).invoke({"chat_history": chat_history, "question": user_input}).content
    return {"role": "assistant", "content": response}
# Gradio interface setup: type="messages" passes the history as role/content dicts.
demo = gr.ChatInterface(chat_with_ai, type="messages")
demo.launch(share=True, debug=True)
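# (Optional sketch, not in the original post.) The prompt-only chatbot above
# does not use the PDF. To serve the retrieval chain built earlier through the
# same Gradio UI, the handler can call conversation_chain instead; the chain
# keeps its own memory, so Gradio's history argument can be ignored.
def chat_with_pdf(user_input, history):
    result = conversation_chain.invoke({"question": user_input})
    return result["answer"]

gr.ChatInterface(chat_with_pdf, type="messages").launch(share=True, debug=True)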