Commit: 8d9ec8dc05 (parent 45c0e2e6ca)
Author: leehk
Date: 2025-03-06 12:00:56 +08:00

5 changed files with 197 additions and 0 deletions

View File

@@ -0,0 +1,33 @@
import streamlit as st
import anthropic

with st.sidebar:
    anthropic_api_key = st.text_input("Anthropic API Key", key="file_qa_api_key", type="password")
    "[View the source code](https://github.com/streamlit/llm-examples/blob/main/pages/1_File_Q%26A.py)"
    "[![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://codespaces.new/streamlit/llm-examples?quickstart=1)"

st.title("📝 File Q&A with Anthropic")
uploaded_file = st.file_uploader("Upload an article", type=("txt", "md"))
question = st.text_input(
    "Ask something about the article",
    placeholder="Can you give me a short summary?",
    disabled=not uploaded_file,
)

if uploaded_file and question and not anthropic_api_key:
    st.info("Please add your Anthropic API key to continue.")

if uploaded_file and question and anthropic_api_key:
    article = uploaded_file.read().decode()
    prompt = f"""{anthropic.HUMAN_PROMPT} Here's an article:\n\n<article>
    {article}\n\n</article>\n\n{question}{anthropic.AI_PROMPT}"""

    client = anthropic.Client(api_key=anthropic_api_key)
    response = client.completions.create(
        prompt=prompt,
        stop_sequences=[anthropic.HUMAN_PROMPT],
        model="claude-v1",  # "claude-2" for Claude 2 model
        max_tokens_to_sample=100,
    )
    st.write("### Answer")
    st.write(response.completion)
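The file above targets the legacy Anthropic completions endpoint (anthropic.HUMAN_PROMPT, anthropic.Client, client.completions.create), which current releases of the anthropic package no longer ship. A minimal sketch of the same question-answering call against the newer Messages API, reusing the anthropic_api_key, article, and question variables defined above; the model id is an assumption and the snippet is not part of this commit:

import anthropic

client = anthropic.Anthropic(api_key=anthropic_api_key)
message = client.messages.create(
    model="claude-3-5-sonnet-latest",  # assumption: any currently available Claude model id
    max_tokens=300,
    messages=[
        {
            "role": "user",
            "content": f"Here's an article:\n\n<article>\n{article}\n</article>\n\n{question}",
        }
    ],
)
st.write("### Answer")
st.write(message.content[0].text)  # the Messages API returns a list of content blocks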

View File

@@ -0,0 +1,48 @@
import streamlit as st

from langchain.agents import initialize_agent, AgentType
from langchain.callbacks import StreamlitCallbackHandler
from langchain.chat_models import ChatOpenAI
from langchain.tools import DuckDuckGoSearchRun

with st.sidebar:
    openai_api_key = st.text_input(
        "OpenAI API Key", key="langchain_search_api_key_openai", type="password"
    )
    "[Get an OpenAI API key](https://platform.openai.com/account/api-keys)"
    "[View the source code](https://github.com/streamlit/llm-examples/blob/main/pages/2_Chat_with_search.py)"
    "[![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://codespaces.new/streamlit/llm-examples?quickstart=1)"

st.title("🔎 LangChain - Chat with search")

"""
In this example, we're using `StreamlitCallbackHandler` to display the thoughts and actions of an agent in an interactive Streamlit app.
Try more LangChain 🤝 Streamlit Agent examples at [github.com/langchain-ai/streamlit-agent](https://github.com/langchain-ai/streamlit-agent).
"""

if "messages" not in st.session_state:
    st.session_state["messages"] = [
        {"role": "assistant", "content": "Hi, I'm a chatbot who can search the web. How can I help you?"}
    ]

for msg in st.session_state.messages:
    st.chat_message(msg["role"]).write(msg["content"])

if prompt := st.chat_input(placeholder="Who won the Women's U.S. Open in 2018?"):
    st.session_state.messages.append({"role": "user", "content": prompt})
    st.chat_message("user").write(prompt)

    if not openai_api_key:
        st.info("Please add your OpenAI API key to continue.")
        st.stop()

    llm = ChatOpenAI(model_name="gpt-3.5-turbo", openai_api_key=openai_api_key, streaming=True)
    search = DuckDuckGoSearchRun(name="Search")
    search_agent = initialize_agent(
        [search], llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, handle_parsing_errors=True
    )
    with st.chat_message("assistant"):
        st_cb = StreamlitCallbackHandler(st.container(), expand_new_thoughts=False)
        response = search_agent.run(st.session_state.messages, callbacks=[st_cb])
        st.session_state.messages.append({"role": "assistant", "content": response})
        st.write(response)
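This page imports ChatOpenAI, StreamlitCallbackHandler, and DuckDuckGoSearchRun from the monolithic langchain package; in LangChain 0.1 and later those integrations were split out into separate packages. A rough sketch of the equivalent imports on a split-package install (the exact paths and required packages are assumptions about the installed versions, not part of the committed file); the agent wiring after the imports stays the same:

from langchain.agents import initialize_agent, AgentType            # still importable from langchain, though marked deprecated
from langchain_community.callbacks import StreamlitCallbackHandler  # requires the langchain-community package
from langchain_community.tools import DuckDuckGoSearchRun           # requires the langchain-community package
from langchain_openai import ChatOpenAI                             # requires the langchain-openai package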

View File

@@ -0,0 +1,22 @@
import streamlit as st
from langchain.llms import OpenAI

st.title("🦜🔗 Langchain Quickstart App")

with st.sidebar:
    openai_api_key = st.text_input("OpenAI API Key", type="password")
    "[Get an OpenAI API key](https://platform.openai.com/account/api-keys)"


def generate_response(input_text):
    llm = OpenAI(temperature=0.7, openai_api_key=openai_api_key)
    st.info(llm(input_text))


with st.form("my_form"):
    text = st.text_area("Enter text:", "What are 3 key advice for learning how to code?")
    submitted = st.form_submit_button("Submit")
    if not openai_api_key:
        st.info("Please add your OpenAI API key to continue.")
    elif submitted:
        generate_response(text)
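generate_response above routes the prompt through LangChain's legacy completion wrapper. A minimal alternative sketch that calls the chat endpoint directly with the first-party openai client (the same client the feedback page later in this commit uses), assuming openai>=1.0 and leaving the form logic unchanged; this variant is illustrative and not part of the committed file:

from openai import OpenAI

def generate_response(input_text):
    # Same behaviour as the LangChain version: send the text, render the reply with st.info.
    client = OpenAI(api_key=openai_api_key)  # openai_api_key comes from the sidebar input above
    completion = client.chat.completions.create(
        model="gpt-3.5-turbo",
        temperature=0.7,
        messages=[{"role": "user", "content": input_text}],
    )
    st.info(completion.choices[0].message.content)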

View File

@@ -0,0 +1,29 @@
import streamlit as st
from langchain.llms import OpenAI
from langchain.prompts import PromptTemplate

st.title("🦜🔗 Langchain - Blog Outline Generator App")

openai_api_key = st.sidebar.text_input("OpenAI API Key", type="password")


def blog_outline(topic):
    # Instantiate LLM model
    llm = OpenAI(model_name="text-davinci-003", openai_api_key=openai_api_key)
    # Prompt
    template = "As an experienced data scientist and technical writer, generate an outline for a blog about {topic}."
    prompt = PromptTemplate(input_variables=["topic"], template=template)
    prompt_query = prompt.format(topic=topic)
    # Run LLM model
    response = llm(prompt_query)
    # Print results
    return st.info(response)


with st.form("myform"):
    topic_text = st.text_input("Enter prompt:", "")
    submitted = st.form_submit_button("Submit")
    if not openai_api_key:
        st.info("Please add your OpenAI API key to continue.")
    elif submitted:
        blog_outline(topic_text)
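The text-davinci-003 completion model hard-coded above has since been retired by OpenAI. A sketch of the same blog_outline helper on a chat model, using the ChatOpenAI wrapper this commit already imports in the chat-with-search page; the model name is an assumption and the snippet is not part of the committed file:

from langchain.chat_models import ChatOpenAI
from langchain.prompts import PromptTemplate

def blog_outline(topic):
    llm = ChatOpenAI(model_name="gpt-3.5-turbo", openai_api_key=openai_api_key)
    template = "As an experienced data scientist and technical writer, generate an outline for a blog about {topic}."
    prompt_query = PromptTemplate(input_variables=["topic"], template=template).format(topic=topic)
    # .predict() accepts a plain string prompt for both LLMs and chat models in this LangChain line.
    response = llm.predict(prompt_query)
    return st.info(response)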

View File

@@ -0,0 +1,65 @@
from openai import OpenAI
import streamlit as st
from streamlit_feedback import streamlit_feedback
import trubrics

with st.sidebar:
    openai_api_key = st.text_input("OpenAI API Key", key="feedback_api_key", type="password")
    "[Get an OpenAI API key](https://platform.openai.com/account/api-keys)"
    "[View the source code](https://github.com/streamlit/llm-examples/blob/main/pages/5_Chat_with_user_feedback.py)"
    "[![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://codespaces.new/streamlit/llm-examples?quickstart=1)"

st.title("📝 Chat with feedback (Trubrics)")

"""
In this example, we're using [streamlit-feedback](https://github.com/trubrics/streamlit-feedback) and Trubrics to collect and store feedback
from the user about the LLM responses.
"""

if "messages" not in st.session_state:
    st.session_state.messages = [
        {"role": "assistant", "content": "How can I help you? Leave feedback to help me improve!"}
    ]
if "response" not in st.session_state:
    st.session_state["response"] = None

messages = st.session_state.messages
for msg in messages:
    st.chat_message(msg["role"]).write(msg["content"])

if prompt := st.chat_input(placeholder="Tell me a joke about sharks"):
    messages.append({"role": "user", "content": prompt})
    st.chat_message("user").write(prompt)

    if not openai_api_key:
        st.info("Please add your OpenAI API key to continue.")
        st.stop()

    client = OpenAI(api_key=openai_api_key)
    response = client.chat.completions.create(model="gpt-3.5-turbo", messages=messages)
    st.session_state["response"] = response.choices[0].message.content

    with st.chat_message("assistant"):
        messages.append({"role": "assistant", "content": st.session_state["response"]})
        st.write(st.session_state["response"])

if st.session_state["response"]:
    feedback = streamlit_feedback(
        feedback_type="thumbs",
        optional_text_label="[Optional] Please provide an explanation",
        key=f"feedback_{len(messages)}",
    )
    # This app is logging feedback to Trubrics backend, but you can send it anywhere.
    # The return value of streamlit_feedback() is just a dict.
    # Configure your own account at https://trubrics.streamlit.app/
    if feedback and "TRUBRICS_EMAIL" in st.secrets:
        config = trubrics.init(
            email=st.secrets.TRUBRICS_EMAIL,
            password=st.secrets.TRUBRICS_PASSWORD,
        )
        collection = trubrics.collect(
            component_name="default",
            model="gpt",
            response=feedback,
            metadata={"chat": messages},
        )
        trubrics.save(config, collection)
        st.toast("Feedback recorded!", icon="📝")
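As the inline comments note, streamlit_feedback() returns a plain dict (for thumbs feedback, roughly {"type": "thumbs", "score": "...", "text": "..."}), so the Trubrics calls can be swapped for any storage backend. A minimal local sketch that appends each feedback event to a JSON-lines file instead; the file name and record layout are illustrative assumptions, not part of this commit:

import json
from datetime import datetime, timezone

if feedback:  # the dict returned by streamlit_feedback() above
    record = {
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "feedback": feedback,
        "chat": messages,
    }
    # feedback_log.jsonl is a hypothetical local file; one JSON object per line.
    with open("feedback_log.jsonl", "a", encoding="utf-8") as f:
        f.write(json.dumps(record, ensure_ascii=False) + "\n")
    st.toast("Feedback recorded!", icon="📝")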