Mirror of https://github.com/aimingmed/aimingmed-ai.git (synced 2026-02-08 16:37:29 +08:00)

Commit 91662eba15: llm-template
Parent commit: f5eb0d3cf5
.gitignore (vendored, 9 additions)
@@ -192,3 +192,12 @@ data/*
 **/output/*.csv
 !**/output/.gitkeep
 !**/data/.gitkeep
+
+# ignore macos files
+.DS_Store
+
+# ignore llmops
+app/llmops/*
+
+# ignore wandb files
+**/wandb/
app/llm-template/Pipfile (new file, 19 lines)
@@ -0,0 +1,19 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"

[packages]
langchain = "*"
google-genai = "*"
chromadb = "*"
SQLAlchemy = "*"
wandb = "*"
langchain-google-genai = "*"
openai = "*"
pydantic = "*"

[dev-packages]

[requires]
python_version = "3.10"
app/llm-template/Pipfile.lock (generated, new file, 2578 lines)
File diff suppressed because it is too large.
app/llm-template/README.md (new file, 23 lines)
@@ -0,0 +1,23 @@
# LLM Project Template

This template provides a starting point for building LLM-powered applications with Langchain, AI Agents, RAG, Summarization, Question-Answering with SQL, and LLMOps.

## Requirements

- Python 3.9+
- Pipenv

## Installation

```bash
pipenv install
```

## Usage

1. Set the `OPENAI_API_KEY`, `WANDB_API_KEY`, `GOOGLE_API_KEY`, and `LANGSMITH_API_KEY` values in `config.py`.
2. Run the `main.py` file:

```bash
pipenv run python main.py
```
app/llm-template/agents/agent1.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# agent1.py

# Add your agent implementation here
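The agent modules are bare stubs. A minimal, framework-light sketch of what an `agent1.py` could hold follows; it is not part of the commit, the `TOOLS` registry and single-step tool-choice flow are illustrative assumptions, and the model name simply mirrors `main.py`.

```python
# agent1.py (illustrative sketch, not the committed stub)
from langchain_google_genai import ChatGoogleGenerativeAI

from config import GOOGLE_API_KEY

# Hypothetical tool registry: plain callables the agent may consult.
TOOLS = {
    "word_count": lambda text: str(len(text.split())),
}

llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001", google_api_key=GOOGLE_API_KEY)


def run_agent(question: str) -> str:
    """Single-step agent: ask the model whether a tool helps, run it, then answer."""
    decision = llm.invoke(
        f"Question: {question}\nAvailable tools: {list(TOOLS)}.\n"
        "Reply with exactly one tool name, or NONE."
    ).content.strip()
    if decision in TOOLS:
        observation = TOOLS[decision](question)
        return llm.invoke(f"{question}\n(Tool {decision} returned: {observation})").content
    return llm.invoke(question).content
```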
app/llm-template/agents/agent2.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# agent2.py

# Add your agent implementation here
app/llm-template/chains/qa_chain.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# qa_chain.py

# Add your question-answering chain implementation here
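A minimal sketch of a question-answering chain that could fill this stub, assuming the pipe (LCEL) syntax of recent `langchain` releases and the same Gemini model as `main.py`; the prompt wording is illustrative.

```python
# qa_chain.py (minimal sketch; prompt wording and model choice are illustrative)
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_google_genai import ChatGoogleGenerativeAI

from config import GOOGLE_API_KEY

prompt = ChatPromptTemplate.from_template(
    "Answer the question using only the given context.\n"
    "Context: {context}\nQuestion: {question}"
)
llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001", google_api_key=GOOGLE_API_KEY)

# Runnable pipeline: prompt -> chat model -> plain string output.
qa_chain = prompt | llm | StrOutputParser()

# Example: qa_chain.invoke({"context": "...", "question": "..."})
```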
app/llm-template/chains/rag_chain.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# rag_chain.py

# Add your RAG chain implementation here
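One way the RAG stub could be filled in, sketched against the `chromadb` client the Pipfile already pulls in; the collection name, placeholder documents, and `answer_with_rag` helper are assumptions rather than the template's prescribed design.

```python
# rag_chain.py (illustrative sketch; collection name and documents are placeholders)
import chromadb
from langchain_google_genai import ChatGoogleGenerativeAI

from config import GOOGLE_API_KEY

client = chromadb.Client()  # in-memory; chromadb.PersistentClient(path=...) persists to disk
collection = client.get_or_create_collection("docs")
collection.add(
    ids=["doc-1", "doc-2"],
    documents=["Example document one.", "Example document two."],
)

llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001", google_api_key=GOOGLE_API_KEY)


def answer_with_rag(question: str, k: int = 2) -> str:
    """Retrieve the k most similar documents, then answer grounded in them."""
    hits = collection.query(query_texts=[question], n_results=k)
    context = "\n".join(hits["documents"][0])
    return llm.invoke(f"Context:\n{context}\n\nQuestion: {question}").content
```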
app/llm-template/chains/sql_chain.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# sql_chain.py

# Add your question-answering with SQL chain implementation here
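A hedged sketch of question answering over SQL using the SQLAlchemy dependency from the Pipfile; the in-memory SQLite database and `patients` table are hypothetical examples, and executing model-written SQL verbatim is only reasonable against throwaway data.

```python
# sql_chain.py (illustrative sketch; the schema and sample rows are hypothetical)
from langchain_google_genai import ChatGoogleGenerativeAI
from sqlalchemy import create_engine, text

from config import GOOGLE_API_KEY

engine = create_engine("sqlite:///:memory:")
with engine.begin() as conn:
    conn.execute(text("CREATE TABLE patients (id INTEGER, age INTEGER)"))
    conn.execute(text("INSERT INTO patients VALUES (1, 34), (2, 58)"))

llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001", google_api_key=GOOGLE_API_KEY)
SCHEMA = "patients(id INTEGER, age INTEGER)"


def ask_sql(question: str) -> list:
    """Have the model draft one SQLite SELECT for the question, then run it."""
    sql = llm.invoke(
        f"Schema: {SCHEMA}\nWrite one SQLite SELECT statement answering: {question}\n"
        "Return only the SQL, with no explanation or markdown."
    ).content.strip().strip("`")  # rough cleanup of stray code fences
    with engine.connect() as conn:
        return conn.execute(text(sql)).fetchall()
```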
app/llm-template/chains/summarization_chain.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# summarization_chain.py

# Add your summarization chain implementation here
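The summarization stub could follow the same prompt-pipe pattern; a minimal sketch with illustrative prompt wording:

```python
# summarization_chain.py (minimal sketch; prompt wording is illustrative)
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_google_genai import ChatGoogleGenerativeAI

from config import GOOGLE_API_KEY

prompt = ChatPromptTemplate.from_template(
    "Summarize the following text in at most three sentences:\n\n{text}"
)
llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001", google_api_key=GOOGLE_API_KEY)

summarization_chain = prompt | llm | StrOutputParser()

# Example: summarization_chain.invoke({"text": "..."})
```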
app/llm-template/config.py (new file, 9 lines)
@@ -0,0 +1,9 @@
# config.py

# Add your configuration variables here
OPENAI_API_KEY = "YOUR_OPENAI_API_KEY"
WANDB_API_KEY = "df1c4099caa8afae3fc83140689c7e799fb0cdad"
GOOGLE_API_KEY = "AIzaSyCIRXC8q5-gKjT5TjiqLxEdKFEmOEzebNQ"
LANGSMITH_API_KEY = "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"

LANGCHAIN_PROJECT = 'llmops-demo'
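The committed `config.py` embeds literal key values, and any key that has appeared in a public diff is normally treated as compromised and rotated. A common alternative, sketched below and not part of the commit, is to read the secrets from the environment so they never enter version control:

```python
# config.py variant (sketch): pull secrets from the environment instead of
# committing literal values; the fallback placeholders are illustrative.
import os

OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "YOUR_OPENAI_API_KEY")
WANDB_API_KEY = os.environ.get("WANDB_API_KEY", "")
GOOGLE_API_KEY = os.environ.get("GOOGLE_API_KEY", "")
LANGSMITH_API_KEY = os.environ.get("LANGSMITH_API_KEY", "")

LANGCHAIN_PROJECT = os.environ.get("LANGCHAIN_PROJECT", "llmops-demo")
```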
app/llm-template/llmops/deployment.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# deployment.py

# Add your deployment scripts and configurations here
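The deployment stub is empty; the smallest possible local "deployment" is a read-eval loop around a chain, sketched below. It assumes the hypothetical `qa_chain` object from the earlier sketch, which the commit itself does not define.

```python
# deployment.py (sketch): a stdin loop around a chain as a stand-in for real serving.
# Assumes the qa_chain sketch above; swap in whatever chain the project exposes.
from chains.qa_chain import qa_chain


def serve_repl() -> None:
    """Read questions until EOF and print the chain's answers."""
    try:
        while True:
            question = input("question> ")
            print(qa_chain.invoke({"context": "", "question": question}))
    except EOFError:
        pass


if __name__ == "__main__":
    serve_repl()
```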
app/llm-template/llmops/evaluation.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# evaluation.py

# Add your evaluation metrics and scripts here
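A sketch of what the evaluation stub could log, reusing the Weights & Biases setup that `main.py` already performs; the exact-match metric and placeholder data are illustrative assumptions.

```python
# evaluation.py (sketch): log a toy metric to Weights & Biases.
import wandb

from config import LANGCHAIN_PROJECT, WANDB_API_KEY


def exact_match(predictions: list[str], references: list[str]) -> float:
    """Fraction of predictions that match their reference exactly."""
    hits = sum(p.strip() == r.strip() for p, r in zip(predictions, references))
    return hits / max(len(references), 1)


if __name__ == "__main__":
    wandb.login(key=WANDB_API_KEY)
    run = wandb.init(project=LANGCHAIN_PROJECT, job_type="evaluation")
    wandb.log({"exact_match": exact_match(["4"], ["4"])})  # placeholder data
    run.finish()
```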
app/llm-template/llmops/monitoring.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# monitoring.py

# Add your monitoring and logging implementation here
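For the monitoring stub, a standard-library sketch that logs call latency and failures; the decorator name is an assumption.

```python
# monitoring.py (sketch): wall-clock latency and error logging for chain calls.
import functools
import logging
import time

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("llm-template.monitoring")


def monitor_latency(func):
    """Log how long each wrapped call takes and any exception it raises."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        start = time.perf_counter()
        try:
            return func(*args, **kwargs)
        except Exception:
            logger.exception("%s failed", func.__name__)
            raise
        finally:
            logger.info("%s took %.2fs", func.__name__, time.perf_counter() - start)
    return wrapper
```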
app/llm-template/main.py (new file, 31 lines)
@@ -0,0 +1,31 @@
# main.py
import os
import wandb
from config import GOOGLE_API_KEY, WANDB_API_KEY, LANGSMITH_API_KEY, LANGCHAIN_PROJECT
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain.callbacks import LangChainTracer
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate

# Set LangSmith environment variables
os.environ["LANGCHAIN_TRACING"] = "true"
os.environ["LANGCHAIN_ENDPOINT"] = "https://api.smith.langchain.com"
os.environ["LANGCHAIN_PROJECT"] = LANGCHAIN_PROJECT
os.environ["LANGCHAIN_API_KEY"] = LANGSMITH_API_KEY

# Initialize Weights & Biases
wandb.login(key=WANDB_API_KEY)
run = wandb.init(project=LANGCHAIN_PROJECT, entity="aimingmed")

# Initialize Gemini API
tracer = LangChainTracer()
llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001", google_api_key=GOOGLE_API_KEY, callbacks=[tracer])

# Example usage of Gemini API
prompt_template = PromptTemplate(template="Write a short poem about the sun.", input_variables=[])
chain = LLMChain(llm=llm, prompt=prompt_template)
response = chain.run({})

print(response)

wandb.finish()
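Two hedged notes on `main.py` as committed: recent LangChain releases deprecate `LLMChain` and `run` in favour of the pipe (LCEL) syntax, and current LangSmith documentation enables tracing through `LANGCHAIN_TRACING_V2` rather than `LANGCHAIN_TRACING`, so the flag may need adjusting depending on the pinned versions. A pipe-style equivalent of the example call, as a sketch:

```python
# Sketch: the same Gemini example in pipe (LCEL) style instead of LLMChain.run.
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate
from langchain_google_genai import ChatGoogleGenerativeAI

from config import GOOGLE_API_KEY

llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001", google_api_key=GOOGLE_API_KEY)
prompt = PromptTemplate.from_template("Write a short poem about the sun.")

chain = prompt | llm | StrOutputParser()
print(chain.invoke({}))
```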
app/llm-template/requirements.txt (new file, 6 lines)
@@ -0,0 +1,6 @@
langchain
google-generativeai
chromadb
SQLAlchemy
wandb  # the Weights & Biases client is published as "wandb"; "weights&biases" is not an installable name
langsmith
app/llm-template/utils/data_loader.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# data_loader.py

# Add your data loading functions here
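A sketch for the data-loader stub that reads plain-text files from the `data/` directory the repository's `.gitignore` already anticipates; restricting to `.txt` files is an assumption.

```python
# data_loader.py (sketch): load plain-text documents from the data/ directory.
from pathlib import Path


def load_text_documents(data_dir: str = "data") -> list[str]:
    """Return the contents of every .txt file found under data_dir."""
    return [
        path.read_text(encoding="utf-8")
        for path in sorted(Path(data_dir).glob("**/*.txt"))
    ]
```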
app/llm-template/utils/embedding_utils.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# embedding_utils.py

# Add your embedding utilities here
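A sketch for the embedding utilities built on the embedding class shipped with `langchain-google-genai`; the `models/embedding-001` model name is an assumption and may need updating.

```python
# embedding_utils.py (sketch): wrap Gemini embeddings; model name is an assumption.
from langchain_google_genai import GoogleGenerativeAIEmbeddings

from config import GOOGLE_API_KEY

embeddings = GoogleGenerativeAIEmbeddings(
    model="models/embedding-001", google_api_key=GOOGLE_API_KEY
)


def embed_texts(texts: list[str]) -> list[list[float]]:
    """Return one embedding vector per input text."""
    return embeddings.embed_documents(texts)
```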