cfa911 committed
Commit 4a24dbd · Parent: 70e30f8

Inserted changes for the FrontEnd

Files changed (1)
  1. app.py +73 -45
app.py CHANGED
@@ -1,13 +1,16 @@
-from fastapi import FastAPI, HTTPException
+# main.py
+from fastapi import FastAPI, HTTPException, BackgroundTasks
 from fastapi.middleware.cors import CORSMiddleware
 from pydantic import BaseModel
-from typing import Dict
+from typing import Dict, Optional
 import uuid
-from datetime import datetime
+from datetime import datetime, timedelta
+import asyncio
+import random

-app = FastAPI()
+app = FastAPI(title="Kairos News API", version="1.0")

-# Enable CORS for React Native development
+# Enable CORS
 app.add_middleware(
     CORSMiddleware,
     allow_origins=["*"],
@@ -15,67 +18,92 @@ app.add_middleware(
     allow_headers=["*"],
 )

-# In-memory "database" (replace with real DB in production)
+# In-memory database simulation
 jobs_db: Dict[str, Dict] = {}

 class PostRequest(BaseModel):
     query: str
     topic: str
-    date: str  # Format: "DD-MM-YYYY to DD-MM-YYYY"
+    date: str  # Format: "YYYY/MM to YYYY/MM"

-@app.post("/index")
-async def create_job(request: PostRequest):
-    """Receive query/topic/date and return job ID"""
+class JobStatus(BaseModel):
+    id: str
+    status: str  # "processing", "completed", "failed"
+    created_at: datetime
+    completed_at: Optional[datetime]
+    request: PostRequest
+    result: Optional[Dict]
+
+@app.post("/index", response_model=JobStatus)
+async def create_job(request: PostRequest, background_tasks: BackgroundTasks):
+    """Create a new processing job"""
     job_id = str(uuid.uuid4())

-    # Store the job (simulating background processing)
+    # Store initial job data
     jobs_db[job_id] = {
         "status": "processing",
+        "created_at": datetime.now(),
+        "completed_at": None,
         "request": request.dict(),
-        "created_at": datetime.now().isoformat(),
         "result": None
     }

-    # Simulate processing completion after 5 seconds
-    # In real apps, use Celery/BackgroundTasks
-    jobs_db[job_id]["result"] = {
-        "query": request.query,
-        "topic": request.topic,
-        "date_interval": request.date,
-        "processed_data": f"Analysis result for {request.query} ({request.topic})"
-    }
-    jobs_db[job_id]["status"] = "completed"
+    # Simulate background processing
+    background_tasks.add_task(process_job, job_id)

-    return {"status": "success", "id": job_id}
+    return {
+        "id": job_id,
+        "status": "processing",
+        "created_at": jobs_db[job_id]["created_at"],
+        "completed_at": None,
+        "request": request,
+        "result": None
+    }

-@app.get("/loading")
-async def get_results(id: str):
-    """Check job status with simulated processing delay"""
+@app.get("/loading", response_model=JobStatus)
+async def get_job_status(id: str):
+    """Check job status with timeout simulation"""
     if id not in jobs_db:
         raise HTTPException(status_code=404, detail="Job not found")

     job = jobs_db[id]

-    # Simulate variable processing time (5-25 seconds)
-    if job["status"] == "processing":
-        elapsed = (datetime.now() - datetime.fromisoformat(job["created_at"])).seconds
-        if elapsed < 5:  # Minimum processing time
-            await asyncio.sleep(2)  # Artificial delay
-            return {"status": "processing"}
+    # Simulate random processing time (3-25 seconds)
+    elapsed = datetime.now() - job["created_at"]
+    if elapsed < timedelta(seconds=3):
+        await asyncio.sleep(1)  # Artificial delay

-        # Mark as completed after random time (for demo)
-        if elapsed > 15 or random.random() > 0.7:  # 30% chance after 15s
-            job["status"] = "completed"
-            job["result"] = {
-                "processed_data": f"Result for {job['request']['query']}",
-                "completion_time": elapsed
-            }
+    # 10% chance of failure for demonstration
+    if random.random() < 0.1 and job["status"] == "processing":
+        job["status"] = "failed"
+        job["result"] = {"error": "Random processing failure"}

-    if job["status"] == "completed":
-        return {
-            "status": "completed",
-            "result": job["result"],
-            "request": job["request"]
-        }
+    return {
+        "id": id,
+        "status": job["status"],
+        "created_at": job["created_at"],
+        "completed_at": job["completed_at"],
+        "request": job["request"],
+        "result": job["result"]
+    }
+
+async def process_job(job_id: str):
+    """Background task to simulate processing"""
+    await asyncio.sleep(random.uniform(3, 10))  # Random processing time

-    return {"status": "processing"}
+    if job_id in jobs_db:
+        jobs_db[job_id]["status"] = "completed"
+        jobs_db[job_id]["completed_at"] = datetime.now()
+        jobs_db[job_id]["result"] = {
+            "query": jobs_db[job_id]["request"]["query"],
+            "topic": jobs_db[job_id]["request"]["topic"],
+            "date_range": jobs_db[job_id]["request"]["date"],
+            "analysis": f"Processed results for {jobs_db[job_id]['request']['query']}",
+            "sources": ["Source A", "Source B", "Source C"],
+            "summary": "This is a generated summary based on your query."
+        }
+
+@app.get("/jobs")
+async def list_jobs():
+    """Debug endpoint to view all jobs"""
+    return jobs_db
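
For reference, a minimal sketch of how a frontend could drive the new endpoints: submit a job with POST /index, then poll GET /loading until the status leaves "processing". The base URL, polling interval, and sample payload below are assumptions for local development, not part of this commit.

```python
# Hypothetical polling client for the API above; assumes the app is served
# locally, e.g. with `uvicorn app:app --reload` on port 8000.
import time
import requests

BASE_URL = "http://localhost:8000"  # assumed dev address

def run_query(query: str, topic: str, date: str) -> dict:
    # Submit the job; the response body includes the generated job id.
    job = requests.post(
        f"{BASE_URL}/index",
        json={"query": query, "topic": topic, "date": date},
    ).json()

    # Poll /loading until the job reports "completed" or "failed".
    while True:
        status = requests.get(f"{BASE_URL}/loading", params={"id": job["id"]}).json()
        if status["status"] != "processing":
            return status
        time.sleep(2)

if __name__ == "__main__":
    outcome = run_query("climate policy", "politics", "2024/01 to 2024/06")
    print(outcome["status"], outcome["result"])
```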