Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions api/chk.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

app = FastAPI()


@app.get("/api/chk")
async def check():
    """Health-check endpoint confirming the standalone routing path works."""
    payload = {"status": "ok", "message": "Standalone routing is working!"}
    return payload
16 changes: 9 additions & 7 deletions api/index.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,13 +34,15 @@ async def debug_root():
# Only expose detailed error info in debug mode
response = {"error": "Import failed", "message": _error_message}
if _is_debug:
response.update({
"traceback": _error_traceback,
"sys_path": sys.path,
"cwd": os.getcwd(),
"api_dir_exists": Path("api").exists(),
"api_main_exists": Path("api/main.py").exists(),
})
response.update(
{
"traceback": _error_traceback,
"sys_path": sys.path,
"cwd": os.getcwd(),
"api_dir_exists": Path("api").exists(),
"api_main_exists": Path("api/main.py").exists(),
}
)
return response

@app.get("/health")
Expand Down
19 changes: 9 additions & 10 deletions api/routes/blog.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ async def start_blog_generation(
"""Start blog generation process.

Runs until the keyword selection interrupt, then returns suggested keywords.

API keys can be provided via HTTP headers:
- X-OpenAI-API-Key
- X-Anthropic-API-Key
Expand All @@ -117,7 +117,7 @@ async def start_blog_generation(
# Prepare input with api_keys in config
config_dict = request.config.model_dump() if request.config else {}
config_dict["api_keys"] = api_keys # Inject API keys into config

input_state = {
"url": str(request.url),
"user_keywords": request.user_keywords,
Expand Down Expand Up @@ -159,9 +159,7 @@ async def start_blog_generation(
"error": str(e), # Keep for internal diagnostics
"created_at": time.time(),
}
raise HTTPException(
status_code=500, detail="Blog generation failed"
) from None
raise HTTPException(status_code=500, detail="Blog generation failed") from None


@router.post("/{job_id}/resume", response_model=BlogJobResponse)
Expand Down Expand Up @@ -199,7 +197,6 @@ async def resume_blog_generation(
graph = get_blog_writer_graph()

try:

# Update state with selected keywords, then resume
await graph.aupdate_state(
config,
Expand All @@ -214,7 +211,9 @@ async def resume_blog_generation(
html_content=result.get("html_content", ""),
suggested_keywords=suggested_keywords,
selected_keywords=request.selected_keywords,
seo_meta=SEOMeta(**(result.get("seo_meta") or {"title": "", "description": ""})),
seo_meta=SEOMeta(
**(result.get("seo_meta") or {"title": "", "description": ""})
),
image_urls=result.get("image_urls", []),
)

Expand All @@ -233,9 +232,7 @@ async def resume_blog_generation(
async with _jobs_lock:
_jobs[job_id]["status"] = JobStatus.FAILED
_jobs[job_id]["error"] = str(e) # Keep for internal diagnostics
raise HTTPException(
status_code=500, detail="Blog generation failed"
) from None
raise HTTPException(status_code=500, detail="Blog generation failed") from None


@router.get("/{job_id}/status", response_model=BlogJobStatusResponse)
Expand All @@ -253,6 +250,8 @@ async def get_job_status(job_id: str) -> BlogJobStatusResponse:
result=job.get("result"),
error=job.get("error"),
)


@router.get("/jobs", response_model=list[BlogJobStatusResponse])
async def list_jobs() -> list[BlogJobStatusResponse]:
"""List all blog generation jobs in memory."""
Expand Down
2 changes: 1 addition & 1 deletion api/routes/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ async def chat(
api_keys: APIKeys = Depends(get_api_keys),
) -> ChatResponse:
"""Send a message to the chat agent.

API keys can be provided via HTTP headers:
- X-OpenAI-API-Key
- X-Anthropic-API-Key
Expand Down
6 changes: 4 additions & 2 deletions api/routes/tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
@router.get("/scrape")
async def scrape_url(
url: str = Query(..., description="URL to scrape"),
type: ScraperType = Query(ScraperType.BEAUTIFULSOUP, description="Scraper type")
type: ScraperType = Query(ScraperType.BEAUTIFULSOUP, description="Scraper type"),
):
"""Scrape content from a URL directly."""
try:
Expand All @@ -28,7 +28,9 @@ async def scrape_url(
@router.get("/images/search")
async def search_images(
query: str = Query(..., description="Search query"),
provider: ImageProvider = Query(ImageProvider.UNSPLASH, description="Image provider")
provider: ImageProvider = Query(
ImageProvider.UNSPLASH, description="Image provider"
),
):
"""Search or generate images directly."""
try:
Expand Down
40 changes: 26 additions & 14 deletions api/schemas/api_keys.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,39 +9,48 @@

class APIKeys(BaseModel):
    """Container for LLM API keys.

    Holds API keys for various LLM providers. Keys can be provided via
    HTTP headers or fall back to environment variables.
    """

    # Per-provider keys; None means "not supplied via header", in which
    # case the getter falls back to the matching environment variable.
    openai_api_key: Optional[str] = None
    anthropic_api_key: Optional[str] = None
    google_api_key: Optional[str] = None
    openrouter_api_key: Optional[str] = None
    tavily_api_key: Optional[str] = None

    def get_openai_key(self) -> Optional[str]:
        """Get OpenAI API key (header or env fallback)."""
        return self.openai_api_key or os.getenv("OPENAI_API_KEY")

    def get_anthropic_key(self) -> Optional[str]:
        """Get Anthropic API key (header or env fallback)."""
        return self.anthropic_api_key or os.getenv("ANTHROPIC_API_KEY")

    def get_google_key(self) -> Optional[str]:
        """Get Google API key (header or env fallback).

        Checks GOOGLE_API_KEY first, then the GEMINI_API_KEY alias.
        """
        return (
            self.google_api_key
            or os.getenv("GOOGLE_API_KEY")
            or os.getenv("GEMINI_API_KEY")
        )

    def get_openrouter_key(self) -> Optional[str]:
        """Get OpenRouter API key (header or env fallback)."""
        return self.openrouter_api_key or os.getenv("OPENROUTER_API_KEY")

    def get_tavily_key(self) -> Optional[str]:
        """Get Tavily API key (header or env fallback)."""
        return self.tavily_api_key or os.getenv("TAVILY_API_KEY")

    def has_any_key(self) -> bool:
        """Check if at least one LLM API key is available.

        Tavily is a web-search key, not an LLM key, so it is deliberately
        excluded from this check.
        """
        return bool(
            self.get_openai_key()
            or self.get_anthropic_key()
            or self.get_google_key()
            or self.get_openrouter_key()
        )


Expand All @@ -50,20 +59,23 @@ async def get_api_keys(
x_anthropic_api_key: Optional[str] = Header(None, alias="X-Anthropic-API-Key"),
x_google_api_key: Optional[str] = Header(None, alias="X-Google-API-Key"),
x_openrouter_api_key: Optional[str] = Header(None, alias="X-OpenRouter-API-Key"),
x_tavily_api_key: Optional[str] = Header(None, alias="X-Tavily-API-Key"),
) -> APIKeys:
"""FastAPI dependency to extract API keys from request headers.

Headers:
X-OpenAI-API-Key: OpenAI API key
X-Anthropic-API-Key: Anthropic API key
X-Google-API-Key: Google/Gemini API key
X-OpenRouter-API-Key: OpenRouter API key

X-Tavily-API-Key: Tavily API key

Falls back to environment variables if headers are not provided.
"""
return APIKeys(
openai_api_key=x_openai_api_key,
anthropic_api_key=x_anthropic_api_key,
google_api_key=x_google_api_key,
openrouter_api_key=x_openrouter_api_key,
tavily_api_key=x_tavily_api_key,
)
4 changes: 4 additions & 0 deletions api/schemas/post.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,25 +5,29 @@

class PostBase(BaseModel):
    """Base schema shared by all blog-post schemas.

    Field description strings are user-facing (Korean) and are part of the
    generated API docs, so they are left untouched.
    """

    # Post title: required, 1-200 characters.
    title: str = Field(..., min_length=1, max_length=200, description="포스트 제목")
    # Post body: required, non-empty.
    content: str = Field(..., min_length=1, description="포스트 본문")
    # Tag list; defaults to an empty list per instance (safe mutable default).
    tags: List[str] = Field(default_factory=list, description="태그 목록")


class PostCreate(PostBase):
    """Schema for creating a post; inherits all fields from PostBase unchanged."""

    pass


class PostUpdate(BaseModel):
    """Schema for partial post updates — every field is optional.

    Fields left as None are treated as "not provided" by the service layer
    (presumably via model_dump(exclude_unset=True) — confirm against caller).
    """

    # Same constraints as PostBase.title, but optional for partial updates.
    title: Optional[str] = Field(None, min_length=1, max_length=200)
    # Same constraints as PostBase.content, but optional.
    content: Optional[str] = Field(None, min_length=1)
    # Replacement tag list; None means "leave tags unchanged".
    tags: Optional[List[str]] = None


class PostResponse(PostBase):
"""포스트 응답 스키마"""

id: int
created_at: datetime
updated_at: Optional[datetime] = None
Expand Down
19 changes: 10 additions & 9 deletions api/services/post_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

class PostService:
"""블로그 포스트 서비스 (인메모리 저장소)"""

# 인메모리 저장소 (데모용)
_posts: dict[int, dict] = {}
_counter: int = 0
Expand Down Expand Up @@ -50,32 +50,33 @@ def get_multi(
) -> List[PostResponse]:
"""포스트 목록 조회"""
posts = list(cls._posts.values())

if search:
posts = [
p for p in posts
p
for p in posts
if search.lower() in p["title"].lower()
or search.lower() in p["content"].lower()
]

posts = sorted(posts, key=lambda x: x["created_at"], reverse=True)
return [PostResponse(**p) for p in posts[skip:skip + limit]]
return [PostResponse(**p) for p in posts[skip : skip + limit]]

@classmethod
def update(cls, post_id: int, post_update: PostUpdate) -> Optional[PostResponse]:
"""포스트 수정"""
if post_id not in cls._posts:
return None

post_data = cls._posts[post_id]
update_dict = post_update.model_dump(exclude_unset=True)

for key, value in update_dict.items():
post_data[key] = value

post_data["updated_at"] = datetime.now()
cls._posts[post_id] = post_data

return PostResponse(**post_data)

@classmethod
Expand Down
16 changes: 14 additions & 2 deletions casts/blog_writer/CLAUDE.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,16 @@

## 아키텍처 다이어그램

> 키워드 선택 후 Tavily 웹 리서치를 거쳐 최신 정보를 반영한 블로그를 작성합니다.

```mermaid
graph TD
START([START]) --> FetchContent
FetchContent --> AnalyzeContent
AnalyzeContent --> SuggestKeywords
SuggestKeywords --> HumanSelectKeywords
HumanSelectKeywords --> WriteBlog
HumanSelectKeywords --> WebResearch
WebResearch --> WriteBlog
WriteBlog --> OptimizeSEO
OptimizeSEO --> GenerateImages
GenerateImages --> ConvertToHTML
Expand Down Expand Up @@ -63,6 +66,7 @@ class BlogState(TypedDict):
analyzed_content: dict # 분석된 핵심 내용
suggested_keywords: list[str] # 제안된 키워드
selected_keywords: list[str] # 선택된 키워드
search_results: list[dict] # 웹 리서치 결과
blog_markdown: str # 블로그 마크다운 초안
image_urls: list[str] # 이미지 URL들

Expand All @@ -79,7 +83,8 @@ class BlogState(TypedDict):
| `AnalyzeContent` | 핵심 내용 분석 및 요약 | raw_content | analyzed_content |
| `SuggestKeywords` | 키워드 3개 제안 | analyzed_content, user_keywords | suggested_keywords |
| `HumanSelectKeywords` | 사용자 키워드 선택 (interrupt) | suggested_keywords | selected_keywords |
| `WriteBlog` | 블로그 마크다운 작성 | analyzed_content, selected_keywords | blog_markdown |
| `WebResearch` | 선택된 키워드로 웹 리서치 (Tavily) | selected_keywords | search_results |
| `WriteBlog` | 블로그 마크다운 작성 | analyzed_content, selected_keywords, search_results | blog_markdown |
| `OptimizeSEO` | SEO 메타 정보 생성 | blog_markdown, selected_keywords | seo_meta |
| `GenerateImages` | 이미지 생성/수집 | analyzed_content | image_urls |
| `ConvertToHTML` | 최종 HTML 변환 | blog_markdown, image_urls, seo_meta | html_content |
Expand All @@ -93,6 +98,7 @@ class BlogState(TypedDict):
langchain-openai = "*"
langchain-anthropic = "*"
langchain-google-genai = "*"
langchain-tavily = "*"
beautifulsoup4 = "*"
playwright = "*"
openai = "*" # DALL-E
Expand All @@ -108,6 +114,9 @@ OPENAI_API_KEY=
ANTHROPIC_API_KEY=
GOOGLE_API_KEY=

# Web Search
TAVILY_API_KEY=

# Image Generation (사용자 선택)
STABILITY_API_KEY=
UNSPLASH_ACCESS_KEY=
Expand All @@ -132,6 +141,9 @@ class ScraperType(str, Enum):
BEAUTIFULSOUP = "beautifulsoup"
PLAYWRIGHT = "playwright"

class SearchProvider(str, Enum):
TAVILY = "tavily"

class BlogWriterConfig(BaseModel):
llm_provider: LLMProvider = LLMProvider.OPENAI
image_provider: ImageProvider = ImageProvider.DALLE
Expand Down
11 changes: 7 additions & 4 deletions casts/blog_writer/graph.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
"""Entry point for the Blog Writer graph.

Assembles 8 nodes into a Sequential + Human-in-the-loop graph:
Assembles 9 nodes into a Sequential + Human-in-the-loop graph:
START → FetchContent → AnalyzeContent → SuggestKeywords →
HumanSelectKeywords (interrupt) → WriteBlogOptimizeSEO
GenerateImages → ConvertToHTML → END
HumanSelectKeywords (interrupt) → WebResearchWriteBlog
OptimizeSEO → GenerateImages → ConvertToHTML → END
"""

from langgraph.graph import END, START, StateGraph
Expand All @@ -17,6 +17,7 @@
HumanSelectKeywords,
OptimizeSEO,
SuggestKeywords,
WebResearch,
WriteBlog,
)
from casts.blog_writer.modules.state import BlogState, InputState, OutputState
Expand Down Expand Up @@ -52,6 +53,7 @@ def build(self):
builder.add_node("analyze_content", AnalyzeContent())
builder.add_node("suggest_keywords", SuggestKeywords())
builder.add_node("human_select_keywords", HumanSelectKeywords())
builder.add_node("web_research", WebResearch())
builder.add_node("write_blog", WriteBlog())
builder.add_node("optimize_seo", OptimizeSEO())
builder.add_node("generate_images", GenerateImages())
Expand All @@ -62,7 +64,8 @@ def build(self):
builder.add_edge("fetch_content", "analyze_content")
builder.add_edge("analyze_content", "suggest_keywords")
builder.add_edge("suggest_keywords", "human_select_keywords")
builder.add_edge("human_select_keywords", "write_blog")
builder.add_edge("human_select_keywords", "web_research")
builder.add_edge("web_research", "write_blog")
builder.add_edge("write_blog", "optimize_seo")
builder.add_edge("optimize_seo", "generate_images")
builder.add_edge("generate_images", "convert_to_html")
Expand Down
Loading