Initial commit

Zhongwei Li
2025-11-30 08:44:08 +08:00
commit 12f823b4a9
24 changed files with 4951 additions and 0 deletions

README.md

@@ -0,0 +1,80 @@
# PocketFlow Project Template
This template provides a best-practice structure for PocketFlow projects.
Source: https://github.com/The-Pocket/PocketFlow-Template-Python
## Project Structure
```
template/
├── main.py # Entry point
├── flow.py # Flow definition
├── nodes.py # Node implementations
├── utils.py # Utility functions (LLM wrappers, etc.)
└── requirements.txt # Python dependencies
```
## Quick Start
1. **Install dependencies:**
   ```bash
   pip install -r requirements.txt
   ```
2. **Configure your LLM:**
   Edit `utils.py` and implement `call_llm()` for your provider (OpenAI, Anthropic, or Google Gemini).
3. **Set your API key:**
   ```bash
   export OPENAI_API_KEY=sk-...
   # or
   export ANTHROPIC_API_KEY=sk-ant-...
   # or
   export GEMINI_API_KEY=...
   ```
4. **Run:**
   ```bash
   python main.py
   ```
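With the placeholder `exec()` in `nodes.py` (before `call_llm()` is implemented), a run looks roughly like this:
```
Enter your question: What is PocketFlow?
✓ Question received: What is PocketFlow?
✓ Answer generated (85 chars)

============================================================
Results:
============================================================
Question: What is PocketFlow?
Answer: This is a placeholder answer to: What is PocketFlow?
Implement call_llm() in utils.py
============================================================
```
Note that `GetQuestionNode` overwrites the preset question in `main.py` with whatever you type.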
## Customization
- **Add nodes:** Create new node classes in `nodes.py` (a sketch follows this list)
- **Modify flow:** Update the connections in `flow.py`
- **Add utilities:** Implement helpers in `utils.py`
- **Update logic:** Customize `main.py` for your use case
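For instance, extending the chain is a matter of defining a node in `nodes.py` and adding one connection in `flow.py`. A minimal sketch; `SummarizeNode` is a hypothetical name, not part of the template:

```python
from pocketflow import Node

class SummarizeNode(Node):
    """Hypothetical extra node: condenses the answer to its first line."""
    def prep(self, shared):
        return shared.get("answer", "")

    def exec(self, answer):
        # A real version might call call_llm() with a summarize prompt
        return answer.split("\n")[0]

    def post(self, shared, prep_res, exec_res):
        shared["summary"] = exec_res
        return "default"

# In flow.py, extend the chain:
# get_question_node >> answer_node >> SummarizeNode()
```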
## Best Practices Demonstrated
1. **Separation of Concerns:**
   - `nodes.py` - node logic only
   - `flow.py` - flow orchestration only
   - `utils.py` - reusable utilities
   - `main.py` - application entry point
2. **Factory Pattern:**
   - `create_qa_flow()` makes the flow reusable
   - Easy to test and modify (see the test sketch after this list)
3. **Clear Data Flow:**
   - Shared store pattern for data passing
   - Explicit state management
4. **Configuration:**
   - Environment variables for API keys
   - `requirements.txt` for dependencies
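Because `create_qa_flow()` builds a fresh flow per call, each test run is isolated. A hedged pytest sketch (the test file and assertions are illustrative, not part of the template):

```python
# test_flow.py (hypothetical)
from flow import create_qa_flow

def test_qa_flow_populates_answer(monkeypatch):
    # GetQuestionNode reads from input(); stub it so the test needs no terminal
    monkeypatch.setattr("builtins.input", lambda _prompt: "What is 2 + 2?")
    shared = {"question": None, "answer": None}
    create_qa_flow().run(shared)  # a fresh, independent flow each call
    assert shared["answer"]       # AnswerNode stored a result
```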
## Next Steps
1. Implement your `call_llm()` function
2. Add your business logic to nodes
3. Define your workflow in `flow.py`
4. Run and iterate!
## Resources
- **PocketFlow Docs:** https://the-pocket.github.io/PocketFlow/
- **GitHub:** https://github.com/The-Pocket/PocketFlow
- **Examples:** See the cookbook/ directory for more patterns

flow.py

@@ -0,0 +1,37 @@
"""
PocketFlow Template - Flow Definition
Source: https://github.com/The-Pocket/PocketFlow-Template-Python
This module defines the QA flow by connecting nodes.
"""
from pocketflow import Flow
from nodes import GetQuestionNode, AnswerNode
def create_qa_flow():
    """
    Create a simple question-answer flow.

    Flow structure:
        GetQuestionNode >> AnswerNode

    Returns:
        Flow: Configured QA flow
    """
    # Create nodes
    get_question_node = GetQuestionNode()
    answer_node = AnswerNode()

    # Connect nodes sequentially
    get_question_node >> answer_node

    # Create flow with start node
    qa_flow = Flow(start=get_question_node)
    return qa_flow

# Module-level instance for convenience (note: created at import time)
qa_flow = create_qa_flow()
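The `>>` chain above follows the `"default"` action each node's `post()` returns. PocketFlow also routes on other action strings via labeled edges; a hedged sketch with illustrative node names (check the `node - "action" >> node` syntax against your PocketFlow version):

```python
from pocketflow import Flow, Node

class ReviewNode(Node):
    """Illustrative node whose post() return value picks the branch."""
    def exec(self, prep_res):
        return "approved"  # a real node would decide dynamically

    def post(self, shared, prep_res, exec_res):
        return exec_res    # the returned action selects the outgoing edge

class ApproveNode(Node):
    def post(self, shared, prep_res, exec_res):
        shared["status"] = "approved"
        return "default"

review, approve = ReviewNode(), ApproveNode()
review - "approved" >> approve  # followed when post() returns "approved"
branching_flow = Flow(start=review)
```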

main.py

@@ -0,0 +1,35 @@
"""
PocketFlow Template - Main Entry Point
Source: https://github.com/The-Pocket/PocketFlow-Template-Python
This template demonstrates best practices for structuring a PocketFlow project.
"""
from flow import create_qa_flow
def main():
    """Main entry point for the application."""
    # Prepare shared data store
    shared = {
        "question": "In one sentence, what's the end of the universe?",
        "answer": None
    }

    # Create and run the flow
    qa_flow = create_qa_flow()
    qa_flow.run(shared)

    # Display results
    print(f"\n{'='*60}")
    print("Results:")
    print(f"{'='*60}")
    print(f"Question: {shared['question']}")
    print(f"Answer: {shared['answer']}")
    print(f"{'='*60}\n")

if __name__ == "__main__":
    main()

nodes.py

@@ -0,0 +1,56 @@
"""
PocketFlow Template - Node Definitions
Source: https://github.com/The-Pocket/PocketFlow-Template-Python
This module contains the node definitions for the QA flow.
Each node implements the prep/exec/post pattern.
"""
from pocketflow import Node
# from utils import call_llm # Uncomment when implemented
class GetQuestionNode(Node):
    """Node to get user input"""

    def prep(self, shared):
        """Prepare: can access shared store, but no data needed here"""
        return None

    def exec(self, prep_res):
        """Execute: get user input"""
        question = input("\nEnter your question: ")
        return question

    def post(self, shared, prep_res, exec_res):
        """Post: store question in shared store"""
        shared["question"] = exec_res
        print(f"✓ Question received: {exec_res}")
        return "default"

class AnswerNode(Node):
    """Node to generate answer using LLM"""

    def prep(self, shared):
        """Prepare: get question from shared store"""
        return shared.get("question", "")

    def exec(self, question):
        """Execute: call LLM to get answer"""
        if not question:
            return "No question provided"
        # Call your LLM implementation:
        # answer = call_llm(question)
        # Placeholder until call_llm() is implemented:
        answer = f"This is a placeholder answer to: {question}\nImplement call_llm() in utils.py"
        return answer

    def post(self, shared, prep_res, exec_res):
        """Post: store answer in shared store"""
        shared["answer"] = exec_res
        print(f"✓ Answer generated ({len(exec_res)} chars)")
        return "default"

requirements.txt

@@ -0,0 +1,20 @@
# PocketFlow Template Requirements
# Core framework
pocketflow
# LLM Providers (uncomment what you need)
# openai>=1.0.0
# anthropic>=0.18.0
# google-genai>=1.0.0  # SDK providing "from google import genai", as used in utils.py
# Optional utilities
# requests>=2.31.0
# beautifulsoup4>=4.12.0
# faiss-cpu>=1.7.4
# numpy>=1.24.0
# Development tools
# pytest>=7.4.0
# black>=23.0.0
# flake8>=6.0.0

utils.py

@@ -0,0 +1,61 @@
"""
PocketFlow Template - Utility Functions
Source: https://github.com/The-Pocket/PocketFlow-Template-Python
This module contains utility functions like LLM wrappers.
"""
import os
def call_llm(prompt):
    """
    Call your LLM provider.

    Args:
        prompt (str): The prompt to send to the LLM

    Returns:
        str: The LLM response

    TODO: Implement your LLM provider here
    """
    # Example: OpenAI
    """
    from openai import OpenAI
    client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
    response = client.chat.completions.create(
        model="gpt-4o",
        messages=[{"role": "user", "content": prompt}]
    )
    return response.choices[0].message.content
    """
    # Example: Anthropic
    """
    from anthropic import Anthropic
    client = Anthropic(api_key=os.getenv("ANTHROPIC_API_KEY"))
    response = client.messages.create(
        model="claude-sonnet-4-0",
        max_tokens=1024,  # required by the Anthropic Messages API
        messages=[{"role": "user", "content": prompt}]
    )
    return response.content[0].text
    """
    # Example: Google Gemini (google-genai SDK)
    """
    from google import genai
    client = genai.Client(api_key=os.getenv("GEMINI_API_KEY"))
    response = client.models.generate_content(
        model='gemini-2.0-flash-exp',
        contents=prompt
    )
    return response.text
    """
    raise NotImplementedError(
        "Implement your LLM provider in utils.py\n"
        "See the examples above for OpenAI, Anthropic, or Google Gemini"
    )
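A small `__main__` guard makes the wrapper testable in isolation (`python utils.py`) once a provider block is uncommented; a minimal sketch:

```python
if __name__ == "__main__":
    test_prompt = "In one sentence, what is PocketFlow?"
    print(f"Prompt: {test_prompt}")
    print(f"Response: {call_llm(test_prompt)}")
```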