diff --git a/Solar-Fullstack-LLM-101/80_gradio.ipynb b/Solar-Fullstack-LLM-101/80_gradio.ipynb
index 357e22b..f606df1 100644
--- a/Solar-Fullstack-LLM-101/80_gradio.ipynb
+++ b/Solar-Fullstack-LLM-101/80_gradio.ipynb
@@ -1,187 +1,237 @@
{
- "cells": [
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# 80. Gradio\n",
- "\n",
- "## Overview \n",
- "In this exercise, we will use Gradio to implement the RAG (Retrieval-Augmented Generation) system we previously learned as an application. By utilizing the Upstage Solar API and LangChain, we will generate context-based responses to user queries and create an easy-to-use interface for interaction using Gradio. This tutorial will guide you through building an application that combines these tools effectively.\n",
- " \n",
- "## Purpose of the Exercise\n",
- "The purpose of this exercise is to construct a simple and intuitive web interface using the Gradio library and implement an interactive chat interface to enable user interaction. By the end of this tutorial, users will be able to create applications that facilitate seamless communication and provide accurate responses based on user input, leveraging the power of RAG and Gradio.\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 1,
- "metadata": {},
- "outputs": [],
- "source": [
- "!pip install -qU gradio python-dotenv langchain-upstage python-dotenv"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 3,
- "metadata": {},
- "outputs": [],
- "source": [
- "# @title set API key\n",
- "import os\n",
- "import getpass\n",
- "from pprint import pprint\n",
- "import warnings\n",
- "\n",
- "warnings.filterwarnings(\"ignore\")\n",
- "\n",
- "from IPython import get_ipython\n",
- "\n",
- "if \"google.colab\" in str(get_ipython()):\n",
- " # Running in Google Colab. Please set the UPSTAGE_API_KEY in the Colab Secrets\n",
- " from google.colab import userdata\n",
- " os.environ[\"UPSTAGE_API_KEY\"] = userdata.get(\"UPSTAGE_API_KEY\")\n",
- "else:\n",
- " # Running locally. Please set the UPSTAGE_API_KEY in the .env file\n",
- " from dotenv import load_dotenv\n",
- "\n",
- " load_dotenv()\n",
- "\n",
- "if \"UPSTAGE_API_KEY\" not in os.environ:\n",
- " os.environ[\"UPSTAGE_API_KEY\"] = getpass.getpass(\"Enter your Upstage API key: \")\n"
- ]
-
- },
- {
- "cell_type": "code",
- "execution_count": 4,
- "metadata": {},
- "outputs": [],
- "source": [
- "import gradio as gr\n",
- "\n",
- "from langchain_upstage import ChatUpstage\n",
- "from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n",
- "from langchain_core.output_parsers import StrOutputParser\n",
- "from langchain.schema import AIMessage, HumanMessage\n",
- "\n",
- "\n",
- "llm = ChatUpstage()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 5,
- "metadata": {},
- "outputs": [],
- "source": [
- "# More general chat\n",
- "chat_with_history_prompt = ChatPromptTemplate.from_messages(\n",
- " [\n",
- " (\"system\", \"You are a helpful assistant.\"),\n",
- " MessagesPlaceholder(variable_name=\"history\"),\n",
- " (\"human\", \"{message}\"),\n",
- " ]\n",
- ")"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 6,
- "metadata": {},
- "outputs": [],
- "source": [
- "chain = chat_with_history_prompt | llm | StrOutputParser()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 7,
- "metadata": {},
- "outputs": [],
- "source": [
- "def chat(message, history):\n",
- " history_langchain_format = []\n",
- " for human, ai in history:\n",
- " history_langchain_format.append(HumanMessage(content=human))\n",
- " history_langchain_format.append(AIMessage(content=ai))\n",
- "\n",
- " return chain.invoke({\"message\": message, \"history\": history_langchain_format})"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 8,
- "metadata": {},
- "outputs": [],
- "source": [
- "with gr.Blocks() as demo:\n",
- " chatbot = gr.ChatInterface(\n",
- " chat,\n",
- " examples=[\n",
- " \"How to eat healthy?\",\n",
- " \"Best Places in Korea\",\n",
- " \"How to make a chatbot?\",\n",
- " ],\n",
- " title=\"Solar Chatbot\",\n",
- " description=\"Upstage Solar Chatbot\",\n",
- " )\n",
- " chatbot.chatbot.height = 300"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 9,
- "metadata": {},
- "outputs": [
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "view-in-github",
+ "colab_type": "text"
+ },
+ "source": [
+        "<a href=\"https://colab.research.google.com/github/UpstageAI/cookbook/blob/main/Solar-Fullstack-LLM-101/80_gradio.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
+ ]
+ },
{
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Running on local URL: http://127.0.0.1:7860\n",
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "6IiAw5kOb04r"
+ },
+ "source": [
+ "# 80. Gradio\n",
+ "\n",
+ "## Overview \n",
+ "In this exercise, we will use Gradio to implement the RAG (Retrieval-Augmented Generation) system we previously learned as an application. By utilizing the Upstage Solar API and LangChain, we will generate context-based responses to user queries and create an easy-to-use interface for interaction using Gradio. This tutorial will guide you through building an application that combines these tools effectively.\n",
+ "\n",
+ "## Purpose of the Exercise\n",
+ "The purpose of this exercise is to construct a simple and intuitive web interface using the Gradio library and implement an interactive chat interface to enable user interaction. By the end of this tutorial, users will be able to create applications that facilitate seamless communication and provide accurate responses based on user input, leveraging the power of RAG and Gradio.\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "AHaJUPo1b04s"
+ },
+ "outputs": [],
+ "source": [
+        "!pip install -qU gradio python-dotenv langchain-upstage langchain"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {
+ "id": "I2mK6xJYb04t"
+ },
+ "outputs": [],
+ "source": [
+ "# @title set API key\n",
+ "from pprint import pprint\n",
+ "import os\n",
+ "\n",
+ "import warnings\n",
+ "\n",
+ "warnings.filterwarnings(\"ignore\")\n",
+ "\n",
+ "if \"google.colab\" in str(get_ipython()):\n",
+ " # Running in Google Colab. Please set the UPSTAGE_API_KEY in the Colab Secrets\n",
+ " from google.colab import userdata\n",
+ "\n",
+ " os.environ[\"UPSTAGE_API_KEY\"] = userdata.get(\"UPSTAGE_API_KEY\")\n",
+ "else:\n",
+ " # Running locally. Please set the UPSTAGE_API_KEY in the .env file\n",
+ " from dotenv import load_dotenv\n",
+ "\n",
+ " load_dotenv()\n",
"\n",
- "To create a public link, set `share=True` in `launch()`.\n"
+ "assert (\n",
+ " \"UPSTAGE_API_KEY\" in os.environ\n",
+ "), \"Please set the UPSTAGE_API_KEY environment variable\""
]
},
{
- "data": {
- "text/html": [
-        "<div><iframe src=\"http://127.0.0.1:7860/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "id": "fIyuV4f7b04t"
+ },
+ "outputs": [],
+ "source": [
+ "from langchain_upstage import ChatUpstage\n",
+ "from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n",
+ "from langchain_core.output_parsers import StrOutputParser\n",
+ "from langchain.schema import AIMessage, HumanMessage\n",
+ "\n",
+ "\n",
+ "llm = ChatUpstage(model=\"solar-pro\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {
+ "id": "3Z7Vv1xwb04t"
+ },
+ "outputs": [],
+ "source": [
+ "# More general chat\n",
+ "chat_with_history_prompt = ChatPromptTemplate.from_messages(\n",
+ " [\n",
+ " (\"system\", \"You are a helpful assistant.\"),\n",
+ " MessagesPlaceholder(variable_name=\"history\"),\n",
+ " (\"human\", \"{message}\"),\n",
+ " ]\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {
+ "id": "dSqT2qEYb04u"
+ },
+ "outputs": [],
+ "source": [
+ "chain = chat_with_history_prompt | llm | StrOutputParser()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {
+ "id": "OJJjvRcnb04u"
+ },
+ "outputs": [],
+ "source": [
+ "def chat(message, history):\n",
+ " history_langchain_format = []\n",
+ " for human, ai in history:\n",
+ " history_langchain_format.append(HumanMessage(content=human))\n",
+ " history_langchain_format.append(AIMessage(content=ai))\n",
+ "\n",
+ " return chain.invoke({\"message\": message, \"history\": history_langchain_format})"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {
+ "id": "NdsTnOn8b04u"
+ },
+ "outputs": [],
+ "source": [
+ "import gradio as gr\n",
+ "\n",
+ "with gr.Blocks() as demo:\n",
+ " chatbot = gr.ChatInterface(\n",
+ " chat,\n",
+ " examples=[\n",
+ " \"How to eat healthy?\",\n",
+ " \"Best Places in Korea\",\n",
+ " \"How to make a chatbot?\",\n",
+ " ],\n",
+ " title=\"Solar Chatbot\",\n",
+ " description=\"Upstage Solar Chatbot\",\n",
+ " )\n",
+ " chatbot.chatbot.height = 300"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {
+ "id": "KhPNfTOFb04u",
+ "outputId": "df86f307-acc9-4151-9c5e-68e959b81479",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 645
+ }
+ },
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Setting queue=True in a Colab notebook requires sharing enabled. Setting `share=True` (you can turn this off by setting `share=False` in `launch()` explicitly).\n",
+ "\n",
+ "Colab notebook detected. To show errors in colab notebook, set debug=True in launch()\n",
+ "Running on public URL: https://f888d908ca5729f8d5.gradio.live\n",
+ "\n",
+ "This share link expires in 72 hours. For free permanent hosting and GPU upgrades, run `gradio deploy` from Terminal to deploy to Spaces (https://huggingface.co/spaces)\n"
+ ]
+ },
+ {
+ "output_type": "display_data",
+ "data": {
+ "text/plain": [
+              "<IPython.core.display.Javascript object>"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "metadata": {}
+ }
],
- "text/plain": [
-       "<IPython.core.display.HTML object>"
+ "source": [
+ "if __name__ == \"__main__\":\n",
+ " demo.launch()"
]
- },
- "metadata": {},
- "output_type": "display_data"
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "# Example\n",
+ "\n",
+ ""
+ ],
+ "metadata": {
+ "id": "qh6DCd48dk6v"
+ }
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.10"
+ },
+ "colab": {
+ "provenance": [],
+ "include_colab_link": true
}
- ],
- "source": [
- "if __name__ == \"__main__\":\n",
- " demo.launch()"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3 (ipykernel)",
- "language": "python",
- "name": "python3"
},
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.10.10"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 4
+ "nbformat": 4,
+ "nbformat_minor": 0
}