Coverage for apps/recipes/api.py: 90%
201 statements
« prev ^ index » next — coverage.py v7.13.4, created at 2026-02-14 19:13 +0000
1"""
2Recipe API endpoints.
3"""
5import asyncio
6import logging
7from typing import List, Optional
9logger = logging.getLogger(__name__)
11from asgiref.sync import sync_to_async
12from django.shortcuts import get_object_or_404
13from ninja import Router, Schema
15from apps.profiles.utils import aget_current_profile_or_none, get_current_profile_or_none
17from .models import Recipe
18from .services.image_cache import SearchImageCache
19from .services.scraper import RecipeScraper, FetchError, ParseError
20from .services.search import RecipeSearch
# Single router for all recipe endpoints, mounted by the project-level API.
router = Router(tags=["recipes"])
25# Schemas
class LinkedRecipeOut(Schema):
    """Minimal recipe info for linked recipe navigation."""

    id: int
    title: str
    # How this recipe relates to the one being viewed.
    relationship: str  # "original", "remix", "sibling"
# Full recipe payload used by the detail endpoint.
class RecipeOut(Schema):
    id: int
    source_url: Optional[str]
    canonical_url: str
    host: str
    site_name: str
    title: str
    author: str
    description: str
    image_url: str
    image: Optional[str]  # Local image path
    ingredients: list
    ingredient_groups: list
    instructions: list
    instructions_text: str
    prep_time: Optional[int]
    cook_time: Optional[int]
    total_time: Optional[int]
    yields: str
    servings: Optional[int]
    category: str
    cuisine: str
    cooking_method: str
    keywords: list
    dietary_restrictions: list
    equipment: list
    nutrition: dict
    rating: Optional[float]
    rating_count: Optional[int]
    language: str
    links: list
    ai_tips: list
    is_remix: bool
    remix_profile_id: Optional[int]
    remixed_from_id: Optional[int]
    linked_recipes: List[LinkedRecipeOut] = []
    scraped_at: str
    updated_at: str

    @staticmethod
    def resolve_image(obj):
        # The image field is falsy when no local image is attached.
        return obj.image.url if obj.image else None

    @staticmethod
    def resolve_scraped_at(obj):
        # ISO-8601 string for JSON serialization.
        return obj.scraped_at.isoformat()

    @staticmethod
    def resolve_updated_at(obj):
        # ISO-8601 string for JSON serialization.
        return obj.updated_at.isoformat()

    @staticmethod
    def resolve_remixed_from_id(obj):
        # Tolerate objects that lack the FK attribute entirely.
        return getattr(obj, "remixed_from_id", None)

    @staticmethod
    def resolve_linked_recipes(obj):
        # Populated by get_recipe() before serialization; empty otherwise.
        return getattr(obj, "linked_recipes", [])
class RecipeListOut(Schema):
    """Condensed recipe output for list views."""

    id: int
    title: str
    host: str
    image_url: str
    image: Optional[str]
    total_time: Optional[int]
    rating: Optional[float]
    is_remix: bool
    scraped_at: str

    @staticmethod
    def resolve_image(obj):
        # Falsy image field means no local copy exists.
        return obj.image.url if obj.image else None

    @staticmethod
    def resolve_scraped_at(obj):
        # ISO-8601 string for JSON serialization.
        return obj.scraped_at.isoformat()
class ScrapeIn(Schema):
    """Request body for the scrape endpoint."""

    url: str  # Recipe page URL to fetch and parse
class ErrorOut(Schema):
    """Standard error response body."""

    detail: str  # Human-readable error message
class SearchResultOut(Schema):
    """A single external search hit (not yet saved as a Recipe)."""

    url: str
    title: str
    host: str
    image_url: str  # External URL (fallback)
    cached_image_url: Optional[str] = None  # Local cached URL
    description: str
    rating_count: Optional[int] = None
class SearchOut(Schema):
    """Paginated response envelope for the search endpoint."""

    results: List[SearchResultOut]
    total: int
    page: int
    has_more: bool
    # Per-site metadata — shape determined by RecipeSearch; TODO confirm.
    sites: dict
149# Endpoints
150# NOTE: Static routes must come before dynamic routes (e.g., /search/ before /{recipe_id}/)
@router.get("/", response=List[RecipeListOut])
def list_recipes(
    request,
    host: Optional[str] = None,
    is_remix: Optional[bool] = None,
    limit: int = 50,
    offset: int = 0,
):
    """
    List saved recipes with optional filters.

    - **host**: Filter by source host (e.g., "allrecipes.com")
    - **is_remix**: Filter by remix status
    - **limit**: Number of recipes to return (default 50)
    - **offset**: Offset for pagination

    Returns only recipes owned by the current profile.
    """
    profile = get_current_profile_or_none(request)
    if not profile:
        return []

    # Collect all filters first; recipes are always scoped to the owner.
    lookup = {"profile": profile}
    if host:
        lookup["host"] = host
    if is_remix is not None:
        lookup["is_remix"] = is_remix

    queryset = Recipe.objects.filter(**lookup).order_by("-scraped_at")
    # Apply pagination via queryset slicing (translates to LIMIT/OFFSET).
    return queryset[offset : offset + limit]
@router.post("/scrape/", response={201: RecipeOut, 400: ErrorOut, 403: ErrorOut, 502: ErrorOut})
async def scrape_recipe(request, payload: ScrapeIn):
    """
    Scrape a recipe from a URL.

    The URL is fetched, parsed for recipe data, and saved to the database.
    If the recipe has an image, it will be downloaded and stored locally.
    The recipe will be owned by the current profile.

    Note: Re-scraping the same URL will create a new recipe record.
    """
    profile = await aget_current_profile_or_none(request)
    if not profile:
        return 403, {"detail": "Profile required to scrape recipes"}

    scraper = RecipeScraper()
    # Lazy %-style logging args: the message is only formatted if emitted.
    logger.info("Scrape request: %s", payload.url)

    try:
        recipe = await scraper.scrape_url(payload.url, profile)
        logger.info('Scrape success: %s -> recipe %s "%s"', payload.url, recipe.id, recipe.title)
        return 201, recipe
    except FetchError as e:
        # Upstream site could not be fetched -> 502 Bad Gateway.
        logger.warning("Scrape fetch error: %s - %s", payload.url, e)
        return 502, {"detail": str(e)}
    except ParseError as e:
        # Page fetched but no recipe data could be extracted -> 400.
        logger.warning("Scrape parse error: %s - %s", payload.url, e)
        return 400, {"detail": str(e)}
@router.get("/search/", response=SearchOut)
async def search_recipes(
    request,
    q: str,
    sources: Optional[str] = None,
    page: int = 1,
    per_page: int = 20,
):
    """
    Search for recipes across multiple sites.

    - **q**: Search query
    - **sources**: Comma-separated list of hosts to search (optional)
    - **page**: Page number (default 1)
    - **per_page**: Results per page (default 20)

    Returns recipe URLs from enabled search sources.
    Uses cached images when available for iOS 9 compatibility.
    Use the scrape endpoint to save a recipe from the results.
    """
    source_list = None
    if sources:
        source_list = [s.strip() for s in sources.split(",") if s.strip()]

    search = RecipeSearch()
    results = await search.search(
        query=q,
        sources=source_list,
        page=page,
        per_page=per_page,
    )

    # Extract image URLs from search results.
    image_urls = [r["image_url"] for r in results["results"] if r.get("image_url")]

    # Look up already-cached images in one batch.
    image_cache = SearchImageCache()
    cached_urls = await image_cache.get_cached_urls_batch(image_urls)

    # Attach the local cached URL (or None) to each result.
    for result in results["results"]:
        external_url = result.get("image_url", "")
        result["cached_image_url"] = cached_urls.get(external_url)

    # Cache uncached images in a background thread (fire-and-forget).
    uncached_urls = [url for url in image_urls if url not in cached_urls]
    if uncached_urls:
        import threading

        def cache_in_background():
            """Run async cache_images in its own event loop (thread-safe)."""
            # asyncio.run creates, runs, and reliably closes a fresh loop.
            # (The previous new_event_loop/close pattern raised NameError in
            # its finally clause if loop creation itself failed, and it
            # shadowed the module-level logger with redundant local imports.)
            try:
                asyncio.run(image_cache.cache_images(uncached_urls))
            except Exception:
                # Best-effort background work: log with traceback, never raise.
                logger.exception("Background image caching failed")

        # daemon=True so the worker never blocks interpreter shutdown.
        threading.Thread(target=cache_in_background, daemon=True).start()

    return results
@router.get("/cache/health/", response={200: dict})
def cache_health(request):
    """
    Health check endpoint for image cache monitoring.

    Returns cache statistics and status for monitoring the background
    image caching system. Use this to verify caching is working correctly
    and to track cache hit rates.
    """
    from django.db.models import Count, Q

    from apps.recipes.models import CachedSearchImage

    # One aggregate query with filtered counts instead of four COUNT queries.
    stats = CachedSearchImage.objects.aggregate(
        total=Count("id"),
        success=Count("id", filter=Q(status=CachedSearchImage.STATUS_SUCCESS)),
        pending=Count("id", filter=Q(status=CachedSearchImage.STATUS_PENDING)),
        failed=Count("id", filter=Q(status=CachedSearchImage.STATUS_FAILED)),
    )
    total = stats["total"]
    success = stats["success"]

    return {
        "status": "healthy",
        "cache_stats": {
            "total": total,
            "success": success,
            "pending": stats["pending"],
            "failed": stats["failed"],
            # Guard against division by zero on an empty cache table.
            "success_rate": f"{(success / total * 100):.1f}%" if total > 0 else "N/A",
        },
    }
315# Dynamic routes with {recipe_id} must come last
@router.get("/{recipe_id}/", response={200: RecipeOut, 404: ErrorOut})
def get_recipe(request, recipe_id: int):
    """
    Get a recipe by ID.

    Only returns recipes owned by the current profile.
    Includes linked_recipes for navigation between original and remixes.
    """
    profile = get_current_profile_or_none(request)
    if not profile:
        # No profile -> report "not found" rather than leaking existence.
        return 404, {"detail": "Recipe not found"}

    # Only allow access to recipes owned by this profile
    recipe = get_object_or_404(Recipe, id=recipe_id, profile=profile)

    # Build linked recipes list for navigation
    linked_recipes = []

    # Add original recipe if this is a remix
    if recipe.remixed_from_id:
        original = recipe.remixed_from
        # Only expose the original if the same profile owns it.
        if original and original.profile_id == profile.id:
            linked_recipes.append(
                {
                    "id": original.id,
                    "title": original.title,
                    "relationship": "original",
                }
            )
            # Add siblings (other remixes of the same original)
            # NOTE(review): nesting reconstructed from a flattened dump — the
            # sibling lookup is assumed to sit inside the ownership check
            # above; confirm against the original file's indentation.
            siblings = (
                Recipe.objects.filter(
                    remixed_from=original,
                    profile=profile,
                )
                .exclude(id=recipe.id)
                .values("id", "title")
            )
            for sibling in siblings:
                linked_recipes.append(
                    {
                        "id": sibling["id"],
                        "title": sibling["title"],
                        "relationship": "sibling",
                    }
                )

    # Add children (remixes of this recipe)
    children = Recipe.objects.filter(
        remixed_from=recipe,
        profile=profile,
    ).values("id", "title")
    for child in children:
        linked_recipes.append(
            {
                "id": child["id"],
                "title": child["title"],
                "relationship": "remix",
            }
        )

    # Attach linked recipes to the recipe object for serialization
    # (consumed by RecipeOut.resolve_linked_recipes).
    recipe.linked_recipes = linked_recipes

    return recipe
@router.delete("/{recipe_id}/", response={204: None, 404: ErrorOut})
def delete_recipe(request, recipe_id: int):
    """
    Delete a recipe by ID.

    Only the owning profile can delete a recipe.
    """
    profile = get_current_profile_or_none(request)
    if not profile:
        # Without a profile, behave as though the recipe does not exist.
        return 404, {"detail": "Recipe not found"}

    # Lookup is scoped to the caller's own recipes; anything else 404s.
    owned_recipe = get_object_or_404(Recipe, id=recipe_id, profile=profile)
    owned_recipe.delete()
    return 204, None