Upload 12 files

Files changed:
- app/api/routes/auth_router.py  +414 -283
- app/api/routes/projects_router.py  +297 -138
- app/utils/db_http.py  +578 -0
- main.py  +254 -130
app/api/routes/auth_router.py
CHANGED
@@ -3,10 +3,18 @@ from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
  from passlib.context import CryptContext
  from jose import JWTError, jwt
  from datetime import datetime, timedelta
- from typing import Optional
  import os
  from pydantic import BaseModel, EmailStr

  router = APIRouter()

  # Password hashing
@@ -88,58 +96,71 @@ async def get_current_user(request: Request, token: str = Depends(oauth2_scheme)):
      except JWTError:
          raise credentials_exception

-         …
          raise credentials_exception
-     return user

  # Routes
  @router.post("/register", response_model=UserResponse)
  async def register(request: Request, user: UserCreate):
-     import logging
-     import time
-     import sys
-     logger = logging.getLogger("auth-server")
-
-     # Log the database connection type and version
-     db_type = getattr(request.app, "db_type", "unknown")
-     logger.info(f"Database connection type: {db_type}")
-
-     # Log database connection details
-     if hasattr(request.app, "db_conn"):
-         logger.info(f"Database connection object type: {type(request.app.db_conn).__name__}")
-     if hasattr(request.app, "last_successful_connection_method"):
-         logger.info(f"Last successful connection method: {request.app.last_successful_connection_method}")
-
      # Generate a unique identifier for this registration attempt
      registration_id = f"reg_{int(time.time())}"
      logger.info(f"[{registration_id}] Starting registration for email: {user.email}")

      try:
-         # Step 1: Check if user already exists
-         …
-                 logger.error(f"[{registration_id}] Error checking if user exists (attempt {attempt+1}): {str(e)}")
-                 if attempt == 2:  # Last attempt
-                     raise
-                 time.sleep(1)  # Wait before retrying

          # Step 2: Hash the password
          logger.info(f"[{registration_id}] Hashing password")
@@ -153,183 +174,70 @@ async def register(request: Request, user: UserCreate):
                  detail=f"Error hashing password: {str(e)}"
              )

-         # Step 3: Insert the new user
-         for attempt in range(3):
-             try:
-                 logger.info(f"[{registration_id}] Inserting new user (attempt {attempt+1})")
-
-                 # Different approaches based on connection type
-                 if db_type == "http-api":
-                     # First, get the highest ID to simulate auto-increment
-                     max_id_result = request.app.db_conn.execute("SELECT MAX(id) FROM users").fetchone()
-                     logger.info(f"[{registration_id}] Max ID result: {max_id_result}")
-
-                     next_id = 1
-                     if max_id_result and max_id_result[0] is not None:
-                         next_id = int(max_id_result[0]) + 1
-
-                     # Insert with explicit ID
-                     insert_query = "INSERT INTO users (id, email, hashed_password) VALUES (?, ?, ?)"
-                     request.app.db_conn.execute(insert_query, (next_id, user.email, hashed_password))
-                     user_id = next_id
-                     logger.info(f"[{registration_id}] Inserted user with explicit ID: {user_id}")
-                 elif db_type == "libsql-experimental":
-                     # For libsql-experimental, we need to use a transaction
-                     logger.info(f"[{registration_id}] Using libsql-experimental approach")
-
-                     # Start a transaction
-                     conn = request.app.db_conn
-                     …
-                         user_id = cursor.lastrowid
-                         logger.info(f"[{registration_id}] Got lastrowid: {user_id}")
-                     except Exception as e:
-                         logger.warning(f"[{registration_id}] Could not get lastrowid: {str(e)}")
-
-                         # If lastrowid is not available, try to get the ID using a query
-                         try:
-                             # Get the ID using the email (which should be unique)
-                             id_query = "SELECT id FROM users WHERE email = ? ORDER BY id DESC LIMIT 1"
-                             id_result = conn.execute(id_query, (user.email,)).fetchone()
-                             if id_result:
-                                 user_id = id_result[0]
-                                 logger.info(f"[{registration_id}] Got ID from query: {user_id}")
-                         except Exception as e2:
-                             logger.error(f"[{registration_id}] Error getting ID from query: {str(e2)}")
-                 else:
-                     # Standard approach for other connection types
-                     logger.info(f"[{registration_id}] Using standard approach")
-                     insert_query = "INSERT INTO users (email, hashed_password) VALUES (?, ?)"
-                     cursor = request.app.db_conn.execute(insert_query, (user.email, hashed_password))
-                 …
-                 logger.info(f"[{registration_id}] Committing transaction")
-                 request.app.db_conn.commit()
-                 logger.info(f"[{registration_id}] Transaction committed")
-
-                 …
-                 verify_result = request.app.db_conn.execute(verify_query, (user.email,)).fetchone()
-                 logger.info(f"[{registration_id}] Verification result: {verify_result}")
-
-                 …
-                 if not user_id:
-                     user_id = verify_result[0]
-                 logger.info(f"[{registration_id}] User verified with ID: {user_id}")
-                 break
-                 else:
-                     logger.error(f"[{registration_id}] User not found after insert!")
-                     if attempt == 2:  # Last attempt
-                         raise Exception("User was not found after insert")
-                     time.sleep(1)  # Wait before retrying
-             except Exception as e:
-                 logger.error(f"[{registration_id}] Error inserting user (attempt {attempt+1}): {str(e)}")
-                 if attempt == 2:  # Last attempt
-                     raise
-                 time.sleep(1)  # Wait before retrying
-
-         # Step 4: Retrieve the full user record - with retry
-         new_user = None
-         for attempt in range(3):
-             try:
-                 # Try by ID if we have it
-                 if user_id:
-                     logger.info(f"[{registration_id}] Retrieving user by ID: {user_id} (attempt {attempt+1})")
-                     new_user = request.app.db_conn.execute("SELECT * FROM users WHERE id = ?", (user_id,)).fetchone()
-                     logger.info(f"[{registration_id}] Result by ID: {new_user}")
-
-                 # If not found by ID, try by email
-                 if not new_user:
-                     logger.info(f"[{registration_id}] Retrieving user by email: {user.email} (attempt {attempt+1})")
-                     new_user = request.app.db_conn.execute("SELECT * FROM users WHERE email = ?", (user.email,)).fetchone()
-                     logger.info(f"[{registration_id}] Result by email: {new_user}")
-
-                 if new_user:
-                     logger.info(f"[{registration_id}] User retrieved successfully")
-                     break
-                 else:
-                     logger.warning(f"[{registration_id}] User not found (attempt {attempt+1})")
-
-                     # Dump all users for debugging
-                     try:
-                         all_users = request.app.db_conn.execute("SELECT id, email FROM users").fetchall()
-                         logger.info(f"[{registration_id}] All users in database: {all_users}")
-                     except Exception as e:
-                         logger.error(f"[{registration_id}] Error retrieving all users: {str(e)}")
-
-                     if attempt == 2:  # Last attempt
-                         # Create a minimal response with what we know
-                         return {
-                             "id": user_id or 0,
-                             "email": user.email,
-                             "is_admin": False,
-                             "created_at": ""
-                         }
-                     time.sleep(1)  # Wait before retrying
-             except Exception as e:
-                 logger.error(f"[{registration_id}] Error retrieving user (attempt {attempt+1}): {str(e)}")
-                 if attempt == 2:  # Last attempt
-                     # Create a minimal response with what we know
-                     return {
-                         "id": user_id or 0,
-                         "email": user.email,
-                         "is_admin": False,
-                         "created_at": ""
-                     }
-                 time.sleep(1)  # Wait before retrying

-         # Step 5: Construct the response
          if not new_user:
-             logger.…
-             …
-             "created_at": ""
-         }
      except Exception as e:
          logger.error(f"[{registration_id}] Registration failed: {str(e)}")
          if isinstance(e, HTTPException):
@@ -341,104 +249,178 @@ async def register(request: Request, user: UserCreate):

  @router.post("/login", response_model=Token)
  async def login(
-     request: Request,
      form_data: OAuth2PasswordRequestForm = Depends()
  ):
-     …
-     user = request.app.db_conn.execute(query, (form_data.username,)).fetchone()
-     …
      )

-     …
-     access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
-     access_token = create_access_token(
-         data={"sub": user[0]},
-         expires_delta=access_token_expires
-     )
-     …

  @router.post("/refresh", response_model=Token)
- async def refresh_token(
      try:
-         …
          raise HTTPException(
              status_code=status.HTTP_401_UNAUTHORIZED,
              detail="Invalid refresh token",
              headers={"WWW-Authenticate": "Bearer"},
          )
-     except JWTError:
-         raise HTTPException(
-             status_code=status.HTTP_401_UNAUTHORIZED,
-             detail="Invalid refresh token",
-             headers={"WWW-Authenticate": "Bearer"},
-         )
-     …
          )
-     …
-         )

-     …
-     new_refresh_token = create_refresh_token(
-         data={"sub": user_id},
-         expires_delta=refresh_token_expires
-     )
-     …

  @router.get("/me", response_model=UserResponse)
- async def get_current_user_info(
-     …

  @router.get("/test-db")
  async def test_database(request: Request):
      """
-     Test endpoint to verify database operations.
      This is for debugging purposes only.
      """
      import logging
@@ -657,3 +639,152 @@ async def test_database(request: Request):
          "error": str(e)
      })
      return results
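The removed implementation above wrapped each database step in a three-attempt retry loop with a one-second pause between attempts. A minimal sketch of that pattern, with do_insert standing in as a hypothetical placeholder for the actual database call:

    import time

    def with_retries(operation, attempts=3, delay=1.0):
        # Retry a database call a fixed number of times, re-raising on the last attempt.
        for attempt in range(attempts):
            try:
                return operation()
            except Exception:
                if attempt == attempts - 1:  # Last attempt
                    raise
                time.sleep(delay)  # Wait before retrying

    # Hypothetical usage mirroring the removed register() flow:
    # user_id = with_retries(lambda: do_insert(user.email, hashed_password))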
  from passlib.context import CryptContext
  from jose import JWTError, jwt
  from datetime import datetime, timedelta
+ from typing import Optional, Dict, Any, List
  import os
+ import time
+ import logging
  from pydantic import BaseModel, EmailStr

+ # Import the HTTP API database utility
+ from app.utils import db_http
+
+ # Configure logging
+ logger = logging.getLogger("auth-server")
+
  router = APIRouter()

  # Password hashing
…
      except JWTError:
          raise credentials_exception

+     # Use the HTTP API utility to get the user
+     operation_id = f"get_user_{int(time.time())}"
+
+     try:
+         # First try with the app's db_conn if it exists and is not HTTP API
+         if hasattr(request.app, "db_conn") and getattr(request.app, "db_type", "") != "http-api":
+             logger.info(f"[{operation_id}] Using app.db_conn to get user")
+             query = "SELECT * FROM users WHERE id = ?"
+             user = request.app.db_conn.execute(query, (token_data.user_id,)).fetchone()
+             if user is not None:
+                 return user
+
+         # If that fails or is not available, use the HTTP API
+         logger.info(f"[{operation_id}] Using HTTP API to get user")
+         user = db_http.get_record_by_id("users", token_data.user_id, operation_id=operation_id)
+
+         if user is None:
+             logger.error(f"[{operation_id}] User not found with ID: {token_data.user_id}")
+             raise credentials_exception
+
+         # If user is a dict (from HTTP API), convert it to a tuple-like structure
+         # to maintain compatibility with existing code
+         if isinstance(user, dict):
+             user_tuple = (
+                 user.get("id"),
+                 user.get("email"),
+                 user.get("hashed_password"),
+                 user.get("created_at"),
+                 user.get("last_login"),
+                 user.get("is_admin", 0)
+             )
+             return user_tuple
+
+         return user
+     except Exception as e:
+         logger.error(f"[{operation_id}] Error getting user: {str(e)}")
          raise credentials_exception

  # Routes
  @router.post("/register", response_model=UserResponse)
  async def register(request: Request, user: UserCreate):
      # Generate a unique identifier for this registration attempt
      registration_id = f"reg_{int(time.time())}"
      logger.info(f"[{registration_id}] Starting registration for email: {user.email}")

      try:
+         # Step 1: Check if user already exists using HTTP API
+         logger.info(f"[{registration_id}] Checking if user already exists using HTTP API")
+
+         # Check if user exists
+         user_exists = db_http.record_exists(
+             "users",
+             "email = ?",
+             [{"type": "text", "value": user.email}],
+             operation_id=registration_id
+         )
+
+         if user_exists:
+             logger.warning(f"[{registration_id}] User with email {user.email} already exists")
+             raise HTTPException(
+                 status_code=status.HTTP_400_BAD_REQUEST,
+                 detail="Email already registered"
+             )
+
+         logger.info(f"[{registration_id}] User does not exist, proceeding with registration")

          # Step 2: Hash the password
          logger.info(f"[{registration_id}] Hashing password")
…
              detail=f"Error hashing password: {str(e)}"
          )

+         # Step 3: Insert the new user using HTTP API
+         logger.info(f"[{registration_id}] Inserting new user using HTTP API")

+         # Prepare user data
+         user_data = {
+             "email": user.email,
+             "hashed_password": hashed_password,
+             "is_admin": 0  # Default to non-admin
+         }

+         # Insert the user
+         user_id = db_http.insert_record("users", user_data, operation_id=registration_id)

+         if not user_id:
+             logger.error(f"[{registration_id}] Failed to insert user")
+             raise HTTPException(
+                 status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+                 detail="Failed to create user account"
+             )

+         logger.info(f"[{registration_id}] User inserted successfully with ID: {user_id}")

+         # Step 4: Retrieve the full user record
+         logger.info(f"[{registration_id}] Retrieving user record")

+         new_user = db_http.get_record_by_id("users", user_id, operation_id=registration_id)

          if not new_user:
+             logger.warning(f"[{registration_id}] User not found after insert, trying by email")
+
+             # Try to get by email as fallback
+             users = db_http.select_records(
+                 "users",
+                 condition="email = ?",
+                 condition_params=[{"type": "text", "value": user.email}],
+                 limit=1,
+                 operation_id=registration_id
+             )
+
+             if users:
+                 new_user = users[0]
+                 user_id = new_user.get("id")
+                 logger.info(f"[{registration_id}] Found user by email with ID: {user_id}")
+             else:
+                 logger.error(f"[{registration_id}] User not found by email either")
+
+                 # Return minimal response with what we know
+                 return {
+                     "id": user_id or 0,
+                     "email": user.email,
+                     "is_admin": False,
+                     "created_at": datetime.now().isoformat()
+                 }
+
+         # Step 5: Construct the response
+         logger.info(f"[{registration_id}] Registration successful for user ID: {user_id}")
+
+         return {
+             "id": new_user.get("id", user_id),
+             "email": new_user.get("email", user.email),
+             "is_admin": bool(new_user.get("is_admin", 0)),
+             "created_at": str(new_user.get("created_at", datetime.now().isoformat()))
+         }
+
      except Exception as e:
          logger.error(f"[{registration_id}] Registration failed: {str(e)}")
          if isinstance(e, HTTPException):
…
  @router.post("/login", response_model=Token)
  async def login(
      form_data: OAuth2PasswordRequestForm = Depends()
  ):
+     operation_id = f"login_{int(time.time())}"
+     logger.info(f"[{operation_id}] Login attempt for email: {form_data.username}")

+     try:
+         # Find the user using HTTP API
+         users = db_http.select_records(
+             "users",
+             condition="email = ?",
+             condition_params=[{"type": "text", "value": form_data.username}],
+             limit=1,
+             operation_id=operation_id
          )

+         if not users:
+             logger.warning(f"[{operation_id}] User not found with email: {form_data.username}")
+             raise HTTPException(
+                 status_code=status.HTTP_401_UNAUTHORIZED,
+                 detail="Incorrect email or password",
+                 headers={"WWW-Authenticate": "Bearer"},
+             )

+         user = users[0]

+         # Verify password
+         if not verify_password(form_data.password, user.get("hashed_password")):
+             logger.warning(f"[{operation_id}] Invalid password for user: {form_data.username}")
+             raise HTTPException(
+                 status_code=status.HTTP_401_UNAUTHORIZED,
+                 detail="Incorrect email or password",
+                 headers={"WWW-Authenticate": "Bearer"},
+             )

+         logger.info(f"[{operation_id}] Password verified for user ID: {user.get('id')}")
+
+         # Update last login
+         update_data = {
+             "last_login": datetime.now().isoformat()
+         }
+
+         db_http.update_record(
+             "users",
+             update_data,
+             "id = ?",
+             [{"type": "integer", "value": str(user.get("id"))}],
+             operation_id=operation_id
+         )
+
+         logger.info(f"[{operation_id}] Last login updated for user ID: {user.get('id')}")
+
+         # Create tokens
+         access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
+         access_token = create_access_token(
+             data={"sub": user.get("id")},
+             expires_delta=access_token_expires
+         )
+
+         refresh_token_expires = timedelta(days=REFRESH_TOKEN_EXPIRE_DAYS)
+         refresh_token = create_refresh_token(
+             data={"sub": user.get("id")},
+             expires_delta=refresh_token_expires
+         )
+
+         logger.info(f"[{operation_id}] Login successful for user ID: {user.get('id')}")
+
+         return {
+             "access_token": access_token,
+             "refresh_token": refresh_token,
+             "token_type": "bearer"
+         }
+     except Exception as e:
+         if isinstance(e, HTTPException):
+             raise
+         logger.error(f"[{operation_id}] Login failed: {str(e)}")
+         raise HTTPException(
+             status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+             detail=f"Login failed: {str(e)}"
+         )

  @router.post("/refresh", response_model=Token)
+ async def refresh_token(refresh_token_str: str):
+     operation_id = f"refresh_{int(time.time())}"
+     logger.info(f"[{operation_id}] Token refresh attempt")
+
      try:
+         # Decode and validate the refresh token
+         try:
+             payload = jwt.decode(refresh_token_str, SECRET_KEY, algorithms=[ALGORITHM])
+             user_id: int = payload.get("sub")
+             if user_id is None:
+                 logger.warning(f"[{operation_id}] Missing user ID in token payload")
+                 raise HTTPException(
+                     status_code=status.HTTP_401_UNAUTHORIZED,
+                     detail="Invalid refresh token",
+                     headers={"WWW-Authenticate": "Bearer"},
+                 )
+         except JWTError as e:
+             logger.warning(f"[{operation_id}] JWT decode error: {str(e)}")
              raise HTTPException(
                  status_code=status.HTTP_401_UNAUTHORIZED,
                  detail="Invalid refresh token",
                  headers={"WWW-Authenticate": "Bearer"},
              )

+         logger.info(f"[{operation_id}] Token decoded successfully for user ID: {user_id}")
+
+         # Check if user exists using HTTP API
+         user = db_http.get_record_by_id("users", user_id, operation_id=operation_id)
+
+         if not user:
+             logger.warning(f"[{operation_id}] User not found with ID: {user_id}")
+             raise HTTPException(
+                 status_code=status.HTTP_401_UNAUTHORIZED,
+                 detail="User not found",
+                 headers={"WWW-Authenticate": "Bearer"},
+             )
+
+         logger.info(f"[{operation_id}] User found with ID: {user_id}")
+
+         # Create new tokens
+         access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
+         access_token = create_access_token(
+             data={"sub": user_id},
+             expires_delta=access_token_expires
          )

+         refresh_token_expires = timedelta(days=REFRESH_TOKEN_EXPIRE_DAYS)
+         new_refresh_token = create_refresh_token(
+             data={"sub": user_id},
+             expires_delta=refresh_token_expires
+         )

+         logger.info(f"[{operation_id}] Token refresh successful for user ID: {user_id}")

+         return {
+             "access_token": access_token,
+             "refresh_token": new_refresh_token,
+             "token_type": "bearer"
+         }
+     except Exception as e:
+         if isinstance(e, HTTPException):
+             raise
+         logger.error(f"[{operation_id}] Token refresh failed: {str(e)}")
+         raise HTTPException(
+             status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+             detail=f"Token refresh failed: {str(e)}"
+         )

  @router.get("/me", response_model=UserResponse)
+ async def get_current_user_info(current_user = Depends(get_current_user)):
+     # Handle both tuple (from db_conn) and dict (from HTTP API) formats
+     if isinstance(current_user, dict):
+         return {
+             "id": current_user.get("id"),
+             "email": current_user.get("email"),
+             "is_admin": bool(current_user.get("is_admin", 0)),
+             "created_at": str(current_user.get("created_at", ""))
+         }
+     else:
+         # Assume tuple format from the original implementation
+         return {
+             "id": current_user[0],
+             "email": current_user[1],
+             "is_admin": bool(current_user[5] if current_user[5] is not None else 0),
+             "created_at": str(current_user[3] if current_user[3] is not None else "")
+         }

  @router.get("/test-db")
  async def test_database(request: Request):
      """
+     Test endpoint to verify database operations using the original method.
      This is for debugging purposes only.
      """
      import logging
…
          "error": str(e)
      })
      return results
+
+ @router.get("/test-db-http")
+ async def test_database_http():
+     """
+     Test endpoint to verify database operations using the HTTP API method.
+     This is for debugging purposes only.
+     """
+     operation_id = f"test_db_http_{int(time.time())}"
+     logger.info(f"[{operation_id}] Starting HTTP API database test")
+
+     results = {
+         "connection_type": "http-api",
+         "operations": []
+     }
+
+     try:
+         # Test 1: Simple SELECT
+         logger.info(f"[{operation_id}] Test 1: Simple SELECT")
+         result = db_http.execute_query("SELECT 1 as test", operation_id=f"{operation_id}_1")
+         results["operations"].append({
+             "name": "Simple SELECT",
+             "success": True,
+             "result": str(result)
+         })
+         logger.info(f"[{operation_id}] Test 1 result: {result}")
+
+         # Test 2: Create a temporary table
+         logger.info(f"[{operation_id}] Test 2: Create a temporary table")
+         db_http.create_table_if_not_exists(
+             "test_table_http",
+             """
+             id INTEGER PRIMARY KEY AUTOINCREMENT,
+             value TEXT NOT NULL,
+             created_at DATETIME DEFAULT CURRENT_TIMESTAMP
+             """,
+             operation_id=f"{operation_id}_2"
+         )
+         results["operations"].append({
+             "name": "Create temporary table",
+             "success": True
+         })
+         logger.info(f"[{operation_id}] Test 2 completed successfully")
+
+         # Test 3: Insert into the temporary table
+         logger.info(f"[{operation_id}] Test 3: Insert into the temporary table")
+         test_value = f"test_value_http_{int(time.time())}"
+
+         insert_id = db_http.insert_record(
+             "test_table_http",
+             {"value": test_value},
+             operation_id=f"{operation_id}_3"
+         )
+
+         results["operations"].append({
+             "name": "Insert into temporary table",
+             "success": insert_id is not None,
+             "insert_id": insert_id
+         })
+         logger.info(f"[{operation_id}] Test 3 completed successfully. Insert ID: {insert_id}")
+
+         # Test 4: Select from the temporary table
+         logger.info(f"[{operation_id}] Test 4: Select from the temporary table")
+         records = db_http.select_records(
+             "test_table_http",
+             condition="value = ?",
+             condition_params=[{"type": "text", "value": test_value}],
+             limit=1,
+             operation_id=f"{operation_id}_4"
+         )
+
+         results["operations"].append({
+             "name": "Select from temporary table",
+             "success": len(records) > 0,
+             "result": str(records[0]) if records else None
+         })
+         logger.info(f"[{operation_id}] Test 4 result: {records}")
+
+         # Test 5: Update the record
+         logger.info(f"[{operation_id}] Test 5: Update the record")
+         updated_value = f"{test_value}_updated"
+
+         success = db_http.update_record(
+             "test_table_http",
+             {"value": updated_value},
+             "id = ?",
+             [{"type": "integer", "value": str(insert_id)}],
+             operation_id=f"{operation_id}_5"
+         )
+
+         results["operations"].append({
+             "name": "Update record",
+             "success": success
+         })
+         logger.info(f"[{operation_id}] Test 5 completed successfully")
+
+         # Test 6: Verify the update
+         logger.info(f"[{operation_id}] Test 6: Verify the update")
+         updated_records = db_http.select_records(
+             "test_table_http",
+             condition="id = ?",
+             condition_params=[{"type": "integer", "value": str(insert_id)}],
+             limit=1,
+             operation_id=f"{operation_id}_6"
+         )
+
+         results["operations"].append({
+             "name": "Verify update",
+             "success": len(updated_records) > 0 and updated_records[0].get("value") == updated_value,
+             "result": str(updated_records[0]) if updated_records else None
+         })
+         logger.info(f"[{operation_id}] Test 6 result: {updated_records}")
+
+         # Test 7: Count records
+         logger.info(f"[{operation_id}] Test 7: Count records")
+         count = db_http.count_records("test_table_http", operation_id=f"{operation_id}_7")
+
+         results["operations"].append({
+             "name": "Count records",
+             "success": count > 0,
+             "count": count
+         })
+         logger.info(f"[{operation_id}] Test 7 result: {count}")
+
+         # Test 8: List all records
+         logger.info(f"[{operation_id}] Test 8: List all records")
+         all_records = db_http.select_records(
+             "test_table_http",
+             limit=10,
+             operation_id=f"{operation_id}_8"
+         )
+
+         results["operations"].append({
+             "name": "List all records",
+             "success": True,
+             "count": len(all_records),
+             "records": all_records
+         })
+         logger.info(f"[{operation_id}] Test 8 result: {len(all_records)} records")
+
+         logger.info(f"[{operation_id}] HTTP API database test completed successfully")
+         return results
+     except Exception as e:
+         logger.error(f"[{operation_id}] HTTP API database test failed: {str(e)}")
+         results["operations"].append({
+             "name": "Error during tests",
+             "success": False,
+             "error": str(e)
+         })
+         return results
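A minimal sketch of how a client might exercise the rewritten auth endpoints, assuming the app is served locally on port 8000 and the router is mounted under /auth (the mount path, port, and the UserCreate field names are assumptions, not shown in this diff):

    import requests

    BASE = "http://localhost:8000/auth"  # assumed mount point

    # Register, then log in using the OAuth2 password-form fields the login route expects.
    requests.post(f"{BASE}/register", json={"email": "user@example.com", "password": "s3cret"})
    resp = requests.post(f"{BASE}/login", data={"username": "user@example.com", "password": "s3cret"})
    tokens = resp.json()

    # The returned bearer token then authorizes /me.
    me = requests.get(f"{BASE}/me", headers={"Authorization": f"Bearer {tokens['access_token']}"})
    print(me.json())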
app/api/routes/projects_router.py
CHANGED
@@ -1,8 +1,14 @@
- from fastapi import APIRouter, HTTPException, Depends, …
  from typing import List, Optional
  from pydantic import BaseModel
  import json
  from .auth_router import get_current_user

  router = APIRouter()

@@ -28,54 +34,106 @@ class ProjectResponse(ProjectBase):
  # Routes
  @router.post("/", response_model=ProjectResponse)
  async def create_project(
-     request: Request,
      project: ProjectCreate,
      current_user = Depends(get_current_user)
  ):
      try:
          # Validate GeoJSON if provided
          if project.geojson:
              try:
                  geojson_data = json.loads(project.geojson)
                  # Basic validation
                  if not isinstance(geojson_data, dict) or "type" not in geojson_data:
                      raise ValueError("Invalid GeoJSON format")
              except json.JSONDecodeError:
                  raise HTTPException(
                      status_code=status.HTTP_400_BAD_REQUEST,
                      detail="Invalid GeoJSON format"
                  )
-         …
          # Get the newly created project
-         new_project = …
-         …
          return {
-             "id": new_project…
-             "owner_id": new_project…
-             "title": new_project…
-             "description": new_project…
-             "geojson": new_project…
-             "storage_bucket": new_project…
-             "created_at": new_project…
-             "updated_at": new_project…
          }
      except HTTPException:
          raise
      except Exception as e:
          raise HTTPException(
              status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
              detail=str(e)
@@ -83,38 +141,53 @@ async def create_project(

  @router.get("/", response_model=List[ProjectResponse])
  async def get_projects(
-     request: Request,
      current_user = Depends(get_current_user),
      skip: int = 0,
      limit: int = 100
  ):
      try:
-         …
          result = []
          for project in projects:
              result.append({
-                 "id": project…
-                 "owner_id": project…
-                 "title": project…
-                 "description": project…
-                 "geojson": project…
-                 "storage_bucket": project…
-                 "created_at": project…
-                 "updated_at": project…
              })
-
          return result
      except Exception as e:
          raise HTTPException(
              status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
              detail=str(e)
@@ -122,36 +195,57 @@ async def get_projects(

  @router.get("/{project_id}", response_model=ProjectResponse)
  async def get_project(
-     request: Request,
      project_id: int,
      current_user = Depends(get_current_user)
  ):
      try:
-         …
          raise HTTPException(
              status_code=status.HTTP_404_NOT_FOUND,
              detail="Project not found"
          )
-
          return {
-             "id": project…
-             "owner_id": project…
-             "title": project…
-             "description": project…
-             "geojson": project…
-             "storage_bucket": project…
-             "created_at": project…
-             "updated_at": project…
          }
      except HTTPException:
          raise
      except Exception as e:
          raise HTTPException(
              status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
              detail=str(e)
@@ -159,98 +253,132 @@ async def get_project(

  @router.patch("/{project_id}", response_model=ProjectResponse)
  async def update_project(
-     request: Request,
      project_id: int,
      project_update: ProjectUpdate,
      current_user = Depends(get_current_user)
  ):
      try:
          # Check if project exists and belongs to user
-         …
          raise HTTPException(
              status_code=status.HTTP_404_NOT_FOUND,
              detail="Project not found"
          )
-         …
          if project_update.title is not None:
-             …
          if project_update.description is not None:
-             …
          if project_update.geojson is not None:
              # Validate GeoJSON
              try:
                  geojson_data = json.loads(project_update.geojson)
                  if not isinstance(geojson_data, dict) or "type" not in geojson_data:
                      raise ValueError("Invalid GeoJSON format")
              except json.JSONDecodeError:
                  raise HTTPException(
                      status_code=status.HTTP_400_BAD_REQUEST,
                      detail="Invalid GeoJSON format"
                  )
-         …
          # Add updated_at field
-         …
          # If no fields to update, return the existing project
-         if …
              return {
-                 "id": existing_project…
-                 "owner_id": existing_project…
-                 "title": existing_project…
-                 "description": existing_project…
-                 "geojson": existing_project…
-                 "storage_bucket": existing_project…
-                 "created_at": existing_project…
-                 "updated_at": existing_project…
              }
-
-         # Update the project
-         …
          # Get the updated project
-         updated_project = …
-         …
          return {
-             "id": updated_project…
-             "owner_id": updated_project…
-             "title": updated_project…
-             "description": updated_project…
-             "geojson": updated_project…
-             "storage_bucket": updated_project…
-             "created_at": updated_project…
-             "updated_at": updated_project…
          }
      except HTTPException:
          raise
      except Exception as e:
          raise HTTPException(
              status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
              detail=str(e)
@@ -258,33 +386,64 @@ async def update_project(

  @router.delete("/{project_id}", status_code=status.HTTP_204_NO_CONTENT)
  async def delete_project(
-     request: Request,
      project_id: int,
      current_user = Depends(get_current_user)
  ):
      try:
          # Check if project exists and belongs to user
-         …
          raise HTTPException(
              status_code=status.HTTP_404_NOT_FOUND,
              detail="Project not found"
          )
-         …
          return None
      except HTTPException:
          raise
      except Exception as e:
          raise HTTPException(
              status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
              detail=str(e)
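Both the removed and the rewritten handlers gate writes on the same lightweight GeoJSON check: parse the string and require a JSON object carrying a "type" member. A standalone sketch of that validation:

    import json

    def is_basic_geojson(text: str) -> bool:
        # Mirrors the router's check: valid JSON, an object, and a "type" key present.
        try:
            data = json.loads(text)
        except json.JSONDecodeError:
            return False
        return isinstance(data, dict) and "type" in data

    assert is_basic_geojson('{"type": "FeatureCollection", "features": []}')
    assert not is_basic_geojson("not geojson")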
+ from fastapi import APIRouter, HTTPException, Depends, status
  from typing import List, Optional
  from pydantic import BaseModel
  import json
+ import time
+ import logging
  from .auth_router import get_current_user
+ from app.utils import db_http
+
+ # Configure logging
+ logger = logging.getLogger("auth-server")

  router = APIRouter()

…
  # Routes
  @router.post("/", response_model=ProjectResponse)
  async def create_project(
      project: ProjectCreate,
      current_user = Depends(get_current_user)
  ):
+     operation_id = f"create_project_{int(time.time())}"
+     logger.info(f"[{operation_id}] Creating new project: {project.title}")
+
      try:
+         # Get user ID based on the type of current_user
+         if isinstance(current_user, dict):
+             user_id = current_user.get("id")
+         else:
+             user_id = current_user[0]
+
+         logger.info(f"[{operation_id}] User ID: {user_id}")
+
          # Validate GeoJSON if provided
          if project.geojson:
              try:
                  geojson_data = json.loads(project.geojson)
                  # Basic validation
                  if not isinstance(geojson_data, dict) or "type" not in geojson_data:
+                     logger.warning(f"[{operation_id}] Invalid GeoJSON format")
                      raise ValueError("Invalid GeoJSON format")
              except json.JSONDecodeError:
+                 logger.warning(f"[{operation_id}] Invalid GeoJSON format (JSON decode error)")
                  raise HTTPException(
                      status_code=status.HTTP_400_BAD_REQUEST,
                      detail="Invalid GeoJSON format"
                  )
+
+         logger.info(f"[{operation_id}] GeoJSON validation passed")
+
+         # Prepare project data
+         project_data = {
+             "owner_id": user_id,
+             "title": project.title,
+             "description": project.description,
+             "geojson": project.geojson,
+             "storage_bucket": "default"  # Default storage bucket
+         }
+
+         # Insert the new project using HTTP API
+         logger.info(f"[{operation_id}] Inserting new project")
+         project_id = db_http.insert_record("projects", project_data, operation_id=operation_id)
+
+         if not project_id:
+             logger.error(f"[{operation_id}] Failed to insert project")
+             raise HTTPException(
+                 status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+                 detail="Failed to create project"
+             )
+
+         logger.info(f"[{operation_id}] Project inserted with ID: {project_id}")
+
          # Get the newly created project
+         new_project = db_http.get_record_by_id("projects", project_id, operation_id=operation_id)
+
+         if not new_project:
+             logger.warning(f"[{operation_id}] Project not found after insert, trying by owner and title")
+
+             # Try to get by owner and title as fallback
+             projects = db_http.select_records(
+                 "projects",
+                 condition="owner_id = ? AND title = ?",
+                 condition_params=[
+                     {"type": "integer", "value": str(user_id)},
+                     {"type": "text", "value": project.title}
+                 ],
+                 order_by="id DESC",
+                 limit=1,
+                 operation_id=operation_id
+             )
+
+             if projects:
+                 new_project = projects[0]
+                 project_id = new_project.get("id")
+                 logger.info(f"[{operation_id}] Found project by owner and title with ID: {project_id}")
+             else:
+                 logger.error(f"[{operation_id}] Project not found after insert")
+                 raise HTTPException(
+                     status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+                     detail="Project was created but could not be retrieved"
+                 )
+
+         logger.info(f"[{operation_id}] Project created successfully with ID: {project_id}")
+
          return {
+             "id": new_project.get("id"),
+             "owner_id": new_project.get("owner_id"),
+             "title": new_project.get("title"),
+             "description": new_project.get("description"),
+             "geojson": new_project.get("geojson"),
+             "storage_bucket": new_project.get("storage_bucket", "default"),
+             "created_at": new_project.get("created_at", ""),
+             "updated_at": new_project.get("updated_at", "")
          }
      except HTTPException:
          raise
      except Exception as e:
+         logger.error(f"[{operation_id}] Error creating project: {str(e)}")
          raise HTTPException(
              status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
              detail=str(e)
…
  @router.get("/", response_model=List[ProjectResponse])
  async def get_projects(
      current_user = Depends(get_current_user),
      skip: int = 0,
      limit: int = 100
  ):
+     operation_id = f"get_projects_{int(time.time())}"
+     logger.info(f"[{operation_id}] Getting projects (skip={skip}, limit={limit})")
+
      try:
+         # Get user ID based on the type of current_user
+         if isinstance(current_user, dict):
+             user_id = current_user.get("id")
+         else:
+             user_id = current_user[0]
+
+         logger.info(f"[{operation_id}] User ID: {user_id}")
+
+         # Get projects using HTTP API
+         projects = db_http.select_records(
+             "projects",
+             condition="owner_id = ?",
+             condition_params=[{"type": "integer", "value": str(user_id)}],
+             order_by="updated_at DESC",
+             limit=limit,
+             offset=skip,
+             operation_id=operation_id
+         )
+
+         logger.info(f"[{operation_id}] Found {len(projects)} projects")
+
+         # Projects are already in dictionary format from the HTTP API
+         # Just need to ensure all required fields are present
          result = []
          for project in projects:
              result.append({
+                 "id": project.get("id"),
+                 "owner_id": project.get("owner_id"),
+                 "title": project.get("title"),
+                 "description": project.get("description"),
+                 "geojson": project.get("geojson"),
+                 "storage_bucket": project.get("storage_bucket", "default"),
+                 "created_at": project.get("created_at", ""),
+                 "updated_at": project.get("updated_at", "")
              })
+
          return result
      except Exception as e:
+         logger.error(f"[{operation_id}] Error getting projects: {str(e)}")
          raise HTTPException(
              status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
              detail=str(e)
…
  @router.get("/{project_id}", response_model=ProjectResponse)
  async def get_project(
      project_id: int,
      current_user = Depends(get_current_user)
  ):
+     operation_id = f"get_project_{int(time.time())}"
+     logger.info(f"[{operation_id}] Getting project with ID: {project_id}")
+
      try:
+         # Get user ID based on the type of current_user
+         if isinstance(current_user, dict):
+             user_id = current_user.get("id")
+         else:
+             user_id = current_user[0]
+
+         logger.info(f"[{operation_id}] User ID: {user_id}")
+
+         # Get project using HTTP API
+         projects = db_http.select_records(
+             "projects",
+             condition="id = ? AND owner_id = ?",
+             condition_params=[
+                 {"type": "integer", "value": str(project_id)},
+                 {"type": "integer", "value": str(user_id)}
+             ],
+             limit=1,
+             operation_id=operation_id
+         )
+
+         if not projects:
+             logger.warning(f"[{operation_id}] Project not found with ID: {project_id}")
              raise HTTPException(
                  status_code=status.HTTP_404_NOT_FOUND,
                  detail="Project not found"
              )
+
+         project = projects[0]
+         logger.info(f"[{operation_id}] Found project: {project.get('title')}")
+
          return {
+             "id": project.get("id"),
+             "owner_id": project.get("owner_id"),
+             "title": project.get("title"),
+             "description": project.get("description"),
+             "geojson": project.get("geojson"),
+             "storage_bucket": project.get("storage_bucket", "default"),
+             "created_at": project.get("created_at", ""),
+             "updated_at": project.get("updated_at", "")
          }
      except HTTPException:
          raise
      except Exception as e:
+         logger.error(f"[{operation_id}] Error getting project: {str(e)}")
          raise HTTPException(
              status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
              detail=str(e)
…
  @router.patch("/{project_id}", response_model=ProjectResponse)
  async def update_project(
      project_id: int,
      project_update: ProjectUpdate,
      current_user = Depends(get_current_user)
  ):
+     operation_id = f"update_project_{int(time.time())}"
+     logger.info(f"[{operation_id}] Updating project with ID: {project_id}")
+
      try:
+         # Get user ID based on the type of current_user
+         if isinstance(current_user, dict):
+             user_id = current_user.get("id")
+         else:
+             user_id = current_user[0]
+
+         logger.info(f"[{operation_id}] User ID: {user_id}")
+
          # Check if project exists and belongs to user
+         projects = db_http.select_records(
+             "projects",
+             condition="id = ? AND owner_id = ?",
+             condition_params=[
+                 {"type": "integer", "value": str(project_id)},
+                 {"type": "integer", "value": str(user_id)}
+             ],
+             limit=1,
+             operation_id=operation_id
+         )
+
+         if not projects:
+             logger.warning(f"[{operation_id}] Project not found with ID: {project_id}")
              raise HTTPException(
                  status_code=status.HTTP_404_NOT_FOUND,
                  detail="Project not found"
              )
+
+         existing_project = projects[0]
+         logger.info(f"[{operation_id}] Found project: {existing_project.get('title')}")
+
+         # Prepare update data
+         update_data = {}
+
          if project_update.title is not None:
+             update_data["title"] = project_update.title
+
          if project_update.description is not None:
+             update_data["description"] = project_update.description
+
          if project_update.geojson is not None:
              # Validate GeoJSON
              try:
                  geojson_data = json.loads(project_update.geojson)
                  if not isinstance(geojson_data, dict) or "type" not in geojson_data:
+                     logger.warning(f"[{operation_id}] Invalid GeoJSON format")
                      raise ValueError("Invalid GeoJSON format")
              except json.JSONDecodeError:
+                 logger.warning(f"[{operation_id}] Invalid GeoJSON format (JSON decode error)")
                  raise HTTPException(
                      status_code=status.HTTP_400_BAD_REQUEST,
                      detail="Invalid GeoJSON format"
                  )
+
+             update_data["geojson"] = project_update.geojson
+
          # Add updated_at field
+         update_data["updated_at"] = time.strftime('%Y-%m-%d %H:%M:%S')
+
          # If no fields to update, return the existing project
+         if len(update_data) <= 1:  # Only updated_at
+             logger.info(f"[{operation_id}] No fields to update")
              return {
+                 "id": existing_project.get("id"),
+                 "owner_id": existing_project.get("owner_id"),
+                 "title": existing_project.get("title"),
+                 "description": existing_project.get("description"),
+                 "geojson": existing_project.get("geojson"),
+                 "storage_bucket": existing_project.get("storage_bucket", "default"),
+                 "created_at": existing_project.get("created_at", ""),
+                 "updated_at": existing_project.get("updated_at", "")
              }
+
+         # Update the project using HTTP API
+         logger.info(f"[{operation_id}] Updating project with data: {update_data}")
+         success = db_http.update_record(
+             "projects",
+             update_data,
+             "id = ? AND owner_id = ?",
+             [
+                 {"type": "integer", "value": str(project_id)},
+                 {"type": "integer", "value": str(user_id)}
+             ],
+             operation_id=operation_id
+         )
+
+         if not success:
+             logger.error(f"[{operation_id}] Failed to update project")
+             raise HTTPException(
+                 status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+                 detail="Failed to update project"
+             )
+
+         logger.info(f"[{operation_id}] Project updated successfully")
+
          # Get the updated project
+         updated_project = db_http.get_record_by_id("projects", project_id, operation_id=operation_id)
+
+         if not updated_project:
+             logger.warning(f"[{operation_id}] Updated project not found")
+             raise HTTPException(
+                 status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+                 detail="Project was updated but could not be retrieved"
+             )
+
          return {
+             "id": updated_project.get("id"),
+             "owner_id": updated_project.get("owner_id"),
+             "title": updated_project.get("title"),
+             "description": updated_project.get("description"),
+             "geojson": updated_project.get("geojson"),
+             "storage_bucket": updated_project.get("storage_bucket", "default"),
+             "created_at": updated_project.get("created_at", ""),
+             "updated_at": updated_project.get("updated_at", "")
          }
      except HTTPException:
          raise
      except Exception as e:
+         logger.error(f"[{operation_id}] Error updating project: {str(e)}")
          raise HTTPException(
              status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
              detail=str(e)
…
  @router.delete("/{project_id}", status_code=status.HTTP_204_NO_CONTENT)
  async def delete_project(
      project_id: int,
      current_user = Depends(get_current_user)
  ):
+     operation_id = f"delete_project_{int(time.time())}"
+     logger.info(f"[{operation_id}] Deleting project with ID: {project_id}")
+
      try:
+         # Get user ID based on the type of current_user
+         if isinstance(current_user, dict):
+             user_id = current_user.get("id")
+         else:
+             user_id = current_user[0]
+
+         logger.info(f"[{operation_id}] User ID: {user_id}")
+
          # Check if project exists and belongs to user
+         projects = db_http.select_records(
+             "projects",
+             condition="id = ? AND owner_id = ?",
+             condition_params=[
+                 {"type": "integer", "value": str(project_id)},
+                 {"type": "integer", "value": str(user_id)}
+             ],
+             limit=1,
+             operation_id=operation_id
+         )
+
+         if not projects:
+             logger.warning(f"[{operation_id}] Project not found with ID: {project_id}")
              raise HTTPException(
                  status_code=status.HTTP_404_NOT_FOUND,
                  detail="Project not found"
              )
+
+         logger.info(f"[{operation_id}] Found project to delete: {projects[0].get('title')}")
+
+         # Delete the project using HTTP API
+         success = db_http.delete_record(
+             "projects",
+             "id = ?",
+             [{"type": "integer", "value": str(project_id)}],
+             operation_id=operation_id
+         )
+
+         if not success:
+             logger.error(f"[{operation_id}] Failed to delete project")
+             raise HTTPException(
+                 status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+                 detail="Failed to delete project"
+             )
+
+         logger.info(f"[{operation_id}] Project deleted successfully")
+
          return None
      except HTTPException:
          raise
      except Exception as e:
+         logger.error(f"[{operation_id}] Error deleting project: {str(e)}")
          raise HTTPException(
              status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
              detail=str(e)
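Every project route above scopes its reads and writes by owner_id using the same db_http call shape, with parameter dicts following the {"type", "value"} convention used throughout this diff. A condensed sketch of that shared pattern:

    def project_for_user(project_id: int, user_id: int, operation_id: str):
        # Returns the project row only if it belongs to the caller, else None.
        rows = db_http.select_records(
            "projects",
            condition="id = ? AND owner_id = ?",
            condition_params=[
                {"type": "integer", "value": str(project_id)},
                {"type": "integer", "value": str(user_id)},
            ],
            limit=1,
            operation_id=operation_id,
        )
        return rows[0] if rows else None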
app/utils/db_http.py
ADDED
@@ -0,0 +1,578 @@
1 |
+
"""
|
2 |
+
Database utility module for HTTP API operations with Turso database.
|
3 |
+
This module provides functions for interacting with the Turso database using the HTTP API.
|
4 |
+
"""
|
5 |
+
|
6 |
+
import os
|
7 |
+
import logging
|
8 |
+
import requests
|
9 |
+
import time
|
10 |
+
from typing import List, Dict, Any, Optional, Tuple, Union
|
11 |
+
|
12 |
+
# Configure logging
|
13 |
+
logger = logging.getLogger("auth-server")
|
14 |
+
|
15 |
+
def get_http_url() -> Tuple[str, str]:
|
16 |
+
"""
|
17 |
+
Get the HTTP URL and auth token for the Turso database.
|
18 |
+
|
19 |
+
Returns:
|
20 |
+
Tuple[str, str]: The HTTP URL and auth token.
|
21 |
+
"""
|
22 |
+
# Extract the database URL and auth token
|
23 |
+
db_url = os.getenv("TURSO_DATABASE_URL", "")
|
24 |
+
auth_token = os.getenv("TURSO_AUTH_TOKEN", "")
|
25 |
+
|
26 |
+
# Convert URL from libsql:// to https://
|
27 |
+
if db_url.startswith("libsql://"):
|
28 |
+
http_url = db_url.replace("libsql://", "https://")
|
29 |
+
else:
|
30 |
+
http_url = db_url
|
31 |
+
|
32 |
+
# Ensure the URL doesn't have a trailing slash
|
33 |
+
http_url = http_url.rstrip('/')
|
34 |
+
|
35 |
+
return http_url, auth_token
|
36 |
+
|
37 |
+
def get_headers(auth_token: str) -> Dict[str, str]:
|
38 |
+
"""
|
39 |
+
Get the headers for the HTTP request.
|
40 |
+
|
41 |
+
Args:
|
42 |
+
auth_token (str): The authentication token.
|
43 |
+
|
44 |
+
Returns:
|
45 |
+
Dict[str, str]: The headers for the HTTP request.
|
46 |
+
"""
|
47 |
+
return {
|
48 |
+
"Authorization": f"Bearer {auth_token}",
|
49 |
+
"Content-Type": "application/json"
|
50 |
+
}
|
51 |
+
|
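For illustration, the two helpers above turn a typical Turso URL into an HTTPS endpoint plus request headers; the URL and token values below are placeholders, not real credentials.

# Placeholder values only -- shows the shape of what the helpers return.
import os
from app.utils import db_http

os.environ["TURSO_DATABASE_URL"] = "libsql://example-db.turso.io"
os.environ["TURSO_AUTH_TOKEN"] = "example-token"

http_url, auth_token = db_http.get_http_url()
# http_url == "https://example-db.turso.io"
headers = db_http.get_headers(auth_token)
# headers == {"Authorization": "Bearer example-token", "Content-Type": "application/json"}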
52 |
+
def execute_query(sql: str, params: List[Dict[str, Any]] = None, operation_id: str = None) -> Dict[str, Any]:
|
53 |
+
"""
|
54 |
+
Execute a SQL query using the HTTP API.
|
55 |
+
|
56 |
+
Args:
|
57 |
+
sql (str): The SQL query to execute.
|
58 |
+
params (List[Dict[str, Any]], optional): The parameters for the query. Defaults to None.
|
59 |
+
operation_id (str, optional): A unique identifier for the operation. Defaults to None.
|
60 |
+
|
61 |
+
Returns:
|
62 |
+
Dict[str, Any]: The response from the database.
|
63 |
+
"""
|
64 |
+
if operation_id is None:
|
65 |
+
operation_id = f"query_{int(time.time())}"
|
66 |
+
|
67 |
+
logger.info(f"[{operation_id}] Executing query: {sql}")
|
68 |
+
|
69 |
+
http_url, auth_token = get_http_url()
|
70 |
+
headers = get_headers(auth_token)
|
71 |
+
|
72 |
+
# Prepare the query
|
73 |
+
query = {
|
74 |
+
"requests": [
|
75 |
+
{
|
76 |
+
"type": "execute",
|
77 |
+
"stmt": {
|
78 |
+
"sql": sql,
|
79 |
+
"args": params or []
|
80 |
+
}
|
81 |
+
},
|
82 |
+
{"type": "close"}
|
83 |
+
]
|
84 |
+
}
|
85 |
+
|
86 |
+
# Send the request
|
87 |
+
try:
|
88 |
+
response = requests.post(f"{http_url}/v2/pipeline", headers=headers, json=query)
|
89 |
+
response.raise_for_status()
|
90 |
+
result = response.json()
|
91 |
+
logger.info(f"[{operation_id}] Query executed successfully")
|
92 |
+
return result
|
93 |
+
except Exception as e:
|
94 |
+
logger.error(f"[{operation_id}] Error executing query: {str(e)}")
|
95 |
+
raise
|
96 |
+
|
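As a usage sketch, a parameterised SELECT goes through execute_query as a single Hrana-style statement posted to /v2/pipeline; the email value is invented, and the users table is the one created later in main.py.

# Sketch of a parameterised query; the email is illustrative.
from app.utils import db_http

result = db_http.execute_query(
    "SELECT id, email FROM users WHERE email = ?",
    params=[{"type": "text", "value": "someone@example.com"}],
    operation_id="example_select",
)
# `result` is the raw pipeline response; helpers such as select_records()
# below unpack result["results"][0]["response"]["result"]["rows"].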
97 |
+
def execute_pipeline(pipeline_requests: List[Dict[str, Any]], operation_id: str = None) -> Dict[str, Any]:
|
98 |
+
"""
|
99 |
+
Execute a pipeline of SQL queries using the HTTP API.
|
100 |
+
|
101 |
+
Args:
|
102 |
+
pipeline_requests (List[Dict[str, Any]]): The request objects to execute.
|
103 |
+
operation_id (str, optional): A unique identifier for the operation. Defaults to None.
|
104 |
+
|
105 |
+
Returns:
|
106 |
+
Dict[str, Any]: The response from the database.
|
107 |
+
"""
|
108 |
+
if operation_id is None:
|
109 |
+
operation_id = f"pipeline_{int(time.time())}"
|
110 |
+
|
111 |
+
logger.info(f"[{operation_id}] Executing pipeline with {len(requests)} requests")
|
112 |
+
|
113 |
+
http_url, auth_token = get_http_url()
|
114 |
+
headers = get_headers(auth_token)
|
115 |
+
|
116 |
+
# Prepare the pipeline
|
117 |
+
pipeline = {
|
118 |
+
"requests": requests + [{"type": "close"}]
|
119 |
+
}
|
120 |
+
|
121 |
+
# Send the request
|
122 |
+
try:
|
123 |
+
response = requests.post(f"{http_url}/v2/pipeline", headers=headers, json=pipeline)
|
124 |
+
response.raise_for_status()
|
125 |
+
result = response.json()
|
126 |
+
logger.info(f"[{operation_id}] Pipeline executed successfully")
|
127 |
+
return result
|
128 |
+
except Exception as e:
|
129 |
+
logger.error(f"[{operation_id}] Error executing pipeline: {str(e)}")
|
130 |
+
raise
|
131 |
+
|
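execute_pipeline takes the raw request objects and appends the closing request itself; a two-statement sketch, with statements chosen only for illustration:

from app.utils import db_http

result = db_http.execute_pipeline(
    [
        {"type": "execute", "stmt": {"sql": "SELECT COUNT(*) FROM users"}},
        {"type": "execute", "stmt": {"sql": "SELECT COUNT(*) FROM projects"}},
    ],
    operation_id="example_pipeline",
)
# result["results"] holds one entry per request, in order (including the trailing close).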
132 |
+
def insert_record(table: str, data: Dict[str, Any], operation_id: str = None) -> Optional[int]:
|
133 |
+
"""
|
134 |
+
Insert a record into a table.
|
135 |
+
|
136 |
+
Args:
|
137 |
+
table (str): The table to insert into.
|
138 |
+
data (Dict[str, Any]): The data to insert.
|
139 |
+
operation_id (str, optional): A unique identifier for the operation. Defaults to None.
|
140 |
+
|
141 |
+
Returns:
|
142 |
+
Optional[int]: The ID of the inserted record, or None if the insert failed.
|
143 |
+
"""
|
144 |
+
if operation_id is None:
|
145 |
+
operation_id = f"insert_{int(time.time())}"
|
146 |
+
|
147 |
+
logger.info(f"[{operation_id}] Inserting record into {table}")
|
148 |
+
|
149 |
+
# Prepare the SQL and parameters
|
150 |
+
columns = list(data.keys())
|
151 |
+
placeholders = ["?"] * len(columns)
|
152 |
+
sql = f"INSERT INTO {table} ({', '.join(columns)}) VALUES ({', '.join(placeholders)})"
|
153 |
+
|
154 |
+
params = []
|
155 |
+
for key, value in data.items():
|
156 |
+
if isinstance(value, str):
|
157 |
+
params.append({"type": "text", "value": value})
|
158 |
+
elif isinstance(value, int):
|
159 |
+
params.append({"type": "integer", "value": str(value)})
|
160 |
+
elif isinstance(value, float):
|
161 |
+
params.append({"type": "float", "value": str(value)})
|
162 |
+
elif value is None:
|
163 |
+
params.append({"type": "null", "value": None})
|
164 |
+
else:
|
165 |
+
params.append({"type": "text", "value": str(value)})
|
166 |
+
|
167 |
+
# Execute the pipeline with insert and get last ID
|
168 |
+
pipeline_requests = [
|
169 |
+
{
|
170 |
+
"type": "execute",
|
171 |
+
"stmt": {
|
172 |
+
"sql": sql,
|
173 |
+
"args": params
|
174 |
+
}
|
175 |
+
},
|
176 |
+
{
|
177 |
+
"type": "execute",
|
178 |
+
"stmt": {"sql": "SELECT last_insert_rowid()"}
|
179 |
+
}
|
180 |
+
]
|
181 |
+
|
182 |
+
try:
|
183 |
+
result = execute_pipeline(pipeline_requests, operation_id)
|
184 |
+
|
185 |
+
# Check for errors in the first request (insert)
|
186 |
+
if "results" in result and len(result["results"]) > 0:
|
187 |
+
if result["results"][0]["type"] == "error":
|
188 |
+
error_msg = result["results"][0]["error"]["message"]
|
189 |
+
logger.error(f"[{operation_id}] Insert error: {error_msg}")
|
190 |
+
return None
|
191 |
+
|
192 |
+
# Try to get the last inserted ID
|
193 |
+
last_id = None
|
194 |
+
if "results" in result and len(result["results"]) > 1:
|
195 |
+
if result["results"][1]["type"] == "ok":
|
196 |
+
rows = result["results"][1]["response"]["result"]["rows"]
|
197 |
+
if rows and len(rows) > 0:
|
198 |
+
last_id = int(rows[0][0]["value"])
|
199 |
+
logger.info(f"[{operation_id}] Record inserted with ID: {last_id}")
|
200 |
+
return last_id
|
201 |
+
|
202 |
+
logger.warning(f"[{operation_id}] Insert succeeded but couldn't get ID")
|
203 |
+
return None
|
204 |
+
except Exception as e:
|
205 |
+
logger.error(f"[{operation_id}] Error inserting record: {str(e)}")
|
206 |
+
return None
|
207 |
+
|
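A usage sketch for insert_record; the column names match the users table created in main.py, but the concrete values are made up, and a real row would need a proper password hash.

from app.utils import db_http

new_id = db_http.insert_record(
    "users",
    {
        "email": "new.user@example.com",      # illustrative
        "hashed_password": "<passlib hash>",  # placeholder, not a valid hash
        "is_admin": 0,
    },
    operation_id="example_insert",
)
if new_id is None:
    print("insert failed (for example a duplicate email)")
else:
    print(f"created user with id {new_id}")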
208 |
+
def update_record(table: str, data: Dict[str, Any], condition: str, condition_params: List[Dict[str, Any]], operation_id: str = None) -> bool:
|
209 |
+
"""
|
210 |
+
Update a record in a table.
|
211 |
+
|
212 |
+
Args:
|
213 |
+
table (str): The table to update.
|
214 |
+
data (Dict[str, Any]): The data to update.
|
215 |
+
condition (str): The condition for the update (e.g., "id = ?").
|
216 |
+
condition_params (List[Dict[str, Any]]): The parameters for the condition.
|
217 |
+
operation_id (str, optional): A unique identifier for the operation. Defaults to None.
|
218 |
+
|
219 |
+
Returns:
|
220 |
+
bool: True if the update succeeded, False otherwise.
|
221 |
+
"""
|
222 |
+
if operation_id is None:
|
223 |
+
operation_id = f"update_{int(time.time())}"
|
224 |
+
|
225 |
+
logger.info(f"[{operation_id}] Updating record in {table}")
|
226 |
+
|
227 |
+
# Prepare the SQL and parameters
|
228 |
+
set_clauses = [f"{key} = ?" for key in data.keys()]
|
229 |
+
sql = f"UPDATE {table} SET {', '.join(set_clauses)} WHERE {condition}"
|
230 |
+
|
231 |
+
params = []
|
232 |
+
for key, value in data.items():
|
233 |
+
if isinstance(value, str):
|
234 |
+
params.append({"type": "text", "value": value})
|
235 |
+
elif isinstance(value, int):
|
236 |
+
params.append({"type": "integer", "value": str(value)})
|
237 |
+
elif isinstance(value, float):
|
238 |
+
params.append({"type": "float", "value": str(value)})
|
239 |
+
elif value is None:
|
240 |
+
params.append({"type": "null", "value": None})
|
241 |
+
else:
|
242 |
+
params.append({"type": "text", "value": str(value)})
|
243 |
+
|
244 |
+
# Add condition parameters
|
245 |
+
params.extend(condition_params)
|
246 |
+
|
247 |
+
try:
|
248 |
+
result = execute_query(sql, params, operation_id)
|
249 |
+
|
250 |
+
# Check for errors
|
251 |
+
if "results" in result and len(result["results"]) > 0:
|
252 |
+
if result["results"][0]["type"] == "error":
|
253 |
+
error_msg = result["results"][0]["error"]["message"]
|
254 |
+
logger.error(f"[{operation_id}] Update error: {error_msg}")
|
255 |
+
return False
|
256 |
+
|
257 |
+
# Check affected rows
|
258 |
+
affected_rows = result["results"][0]["response"]["result"]["affected_row_count"]
|
259 |
+
logger.info(f"[{operation_id}] Updated {affected_rows} rows")
|
260 |
+
return affected_rows > 0
|
261 |
+
|
262 |
+
return False
|
263 |
+
except Exception as e:
|
264 |
+
logger.error(f"[{operation_id}] Error updating record: {str(e)}")
|
265 |
+
return False
|
266 |
+
|
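A sketch of update_record against the projects table; the ids are invented and only show how the WHERE parameters line up with the condition string.

from app.utils import db_http

updated = db_http.update_record(
    "projects",
    {"title": "Renamed project"},            # columns to SET
    "id = ? AND owner_id = ?",               # WHERE clause
    [
        {"type": "integer", "value": "42"},  # project id (illustrative)
        {"type": "integer", "value": "7"},   # owner id (illustrative)
    ],
    operation_id="example_update",
)
# True only when at least one row was matched and changed.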
267 |
+
def delete_record(table: str, condition: str, condition_params: List[Dict[str, Any]], operation_id: str = None) -> bool:
|
268 |
+
"""
|
269 |
+
Delete a record from a table.
|
270 |
+
|
271 |
+
Args:
|
272 |
+
table (str): The table to delete from.
|
273 |
+
condition (str): The condition for the delete (e.g., "id = ?").
|
274 |
+
condition_params (List[Dict[str, Any]]): The parameters for the condition.
|
275 |
+
operation_id (str, optional): A unique identifier for the operation. Defaults to None.
|
276 |
+
|
277 |
+
Returns:
|
278 |
+
bool: True if the delete succeeded, False otherwise.
|
279 |
+
"""
|
280 |
+
if operation_id is None:
|
281 |
+
operation_id = f"delete_{int(time.time())}"
|
282 |
+
|
283 |
+
logger.info(f"[{operation_id}] Deleting record from {table}")
|
284 |
+
|
285 |
+
# Prepare the SQL
|
286 |
+
sql = f"DELETE FROM {table} WHERE {condition}"
|
287 |
+
|
288 |
+
try:
|
289 |
+
result = execute_query(sql, condition_params, operation_id)
|
290 |
+
|
291 |
+
# Check for errors
|
292 |
+
if "results" in result and len(result["results"]) > 0:
|
293 |
+
if result["results"][0]["type"] == "error":
|
294 |
+
error_msg = result["results"][0]["error"]["message"]
|
295 |
+
logger.error(f"[{operation_id}] Delete error: {error_msg}")
|
296 |
+
return False
|
297 |
+
|
298 |
+
# Check affected rows
|
299 |
+
affected_rows = result["results"][0]["response"]["result"]["affected_row_count"]
|
300 |
+
logger.info(f"[{operation_id}] Deleted {affected_rows} rows")
|
301 |
+
return affected_rows > 0
|
302 |
+
|
303 |
+
return False
|
304 |
+
except Exception as e:
|
305 |
+
logger.error(f"[{operation_id}] Error deleting record: {str(e)}")
|
306 |
+
return False
|
307 |
+
|
308 |
+
def select_records(table: str, columns: List[str] = None, condition: str = None,
|
309 |
+
condition_params: List[Dict[str, Any]] = None, limit: int = None,
|
310 |
+
offset: int = None, order_by: str = None, operation_id: str = None) -> List[Dict[str, Any]]:
|
311 |
+
"""
|
312 |
+
Select records from a table.
|
313 |
+
|
314 |
+
Args:
|
315 |
+
table (str): The table to select from.
|
316 |
+
columns (List[str], optional): The columns to select. Defaults to None (all columns).
|
317 |
+
condition (str, optional): The condition for the select. Defaults to None.
|
318 |
+
condition_params (List[Dict[str, Any]], optional): The parameters for the condition. Defaults to None.
|
319 |
+
limit (int, optional): The maximum number of records to return. Defaults to None.
|
320 |
+
offset (int, optional): The number of records to skip. Defaults to None.
|
321 |
+
order_by (str, optional): The order by clause. Defaults to None.
|
322 |
+
operation_id (str, optional): A unique identifier for the operation. Defaults to None.
|
323 |
+
|
324 |
+
Returns:
|
325 |
+
List[Dict[str, Any]]: The selected records.
|
326 |
+
"""
|
327 |
+
if operation_id is None:
|
328 |
+
operation_id = f"select_{int(time.time())}"
|
329 |
+
|
330 |
+
logger.info(f"[{operation_id}] Selecting records from {table}")
|
331 |
+
|
332 |
+
# Prepare the SQL
|
333 |
+
cols = "*" if not columns else ", ".join(columns)
|
334 |
+
sql = f"SELECT {cols} FROM {table}"
|
335 |
+
|
336 |
+
if condition:
|
337 |
+
sql += f" WHERE {condition}"
|
338 |
+
|
339 |
+
if order_by:
|
340 |
+
sql += f" ORDER BY {order_by}"
|
341 |
+
|
342 |
+
if limit:
|
343 |
+
sql += f" LIMIT {limit}"
|
344 |
+
|
345 |
+
if offset:
|
346 |
+
sql += f" OFFSET {offset}"
|
347 |
+
|
348 |
+
try:
|
349 |
+
result = execute_query(sql, condition_params, operation_id)
|
350 |
+
|
351 |
+
# Check for errors
|
352 |
+
if "results" in result and len(result["results"]) > 0:
|
353 |
+
if result["results"][0]["type"] == "error":
|
354 |
+
error_msg = result["results"][0]["error"]["message"]
|
355 |
+
logger.error(f"[{operation_id}] Select error: {error_msg}")
|
356 |
+
return []
|
357 |
+
|
358 |
+
# Extract the records
|
359 |
+
response = result["results"][0]["response"]
|
360 |
+
if "result" in response and "rows" in response["result"]:
|
361 |
+
rows = response["result"]["rows"]
|
362 |
+
cols = response["result"]["cols"]
|
363 |
+
|
364 |
+
# Convert rows to dictionaries
|
365 |
+
records = []
|
366 |
+
for row in rows:
|
367 |
+
record = {}
|
368 |
+
for i, col in enumerate(cols):
|
369 |
+
col_name = col["name"]
|
370 |
+
value = row[i]["value"]
|
371 |
+
|
372 |
+
# Convert value based on type
|
373 |
+
if row[i]["type"] == "integer":
|
374 |
+
value = int(value)
|
375 |
+
elif row[i]["type"] == "float":
|
376 |
+
value = float(value)
|
377 |
+
elif row[i]["type"] == "null":
|
378 |
+
value = None
|
379 |
+
|
380 |
+
record[col_name] = value
|
381 |
+
|
382 |
+
records.append(record)
|
383 |
+
|
384 |
+
logger.info(f"[{operation_id}] Selected {len(records)} records")
|
385 |
+
return records
|
386 |
+
|
387 |
+
logger.warning(f"[{operation_id}] No records found")
|
388 |
+
return []
|
389 |
+
except Exception as e:
|
390 |
+
logger.error(f"[{operation_id}] Error selecting records: {str(e)}")
|
391 |
+
return []
|
392 |
+
|
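A select_records sketch, similar in shape to the lookups used by the projects router; the owner id is illustrative.

from app.utils import db_http

projects = db_http.select_records(
    "projects",
    columns=["id", "title", "created_at"],
    condition="owner_id = ?",
    condition_params=[{"type": "integer", "value": "7"}],  # illustrative owner id
    order_by="created_at DESC",
    limit=20,
    operation_id="example_list_projects",
)
for project in projects:
    print(project["id"], project["title"])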
393 |
+
def get_record_by_id(table: str, id: int, columns: List[str] = None, operation_id: str = None) -> Optional[Dict[str, Any]]:
|
394 |
+
"""
|
395 |
+
Get a record by ID.
|
396 |
+
|
397 |
+
Args:
|
398 |
+
table (str): The table to select from.
|
399 |
+
id (int): The ID of the record.
|
400 |
+
columns (List[str], optional): The columns to select. Defaults to None (all columns).
|
401 |
+
operation_id (str, optional): A unique identifier for the operation. Defaults to None.
|
402 |
+
|
403 |
+
Returns:
|
404 |
+
Optional[Dict[str, Any]]: The record, or None if not found.
|
405 |
+
"""
|
406 |
+
if operation_id is None:
|
407 |
+
operation_id = f"get_by_id_{int(time.time())}"
|
408 |
+
|
409 |
+
condition = "id = ?"
|
410 |
+
condition_params = [{"type": "integer", "value": str(id)}]
|
411 |
+
|
412 |
+
records = select_records(table, columns, condition, condition_params, limit=1, operation_id=operation_id)
|
413 |
+
|
414 |
+
if records:
|
415 |
+
return records[0]
|
416 |
+
|
417 |
+
return None
|
418 |
+
|
419 |
+
def count_records(table: str, condition: str = None, condition_params: List[Dict[str, Any]] = None, operation_id: str = None) -> int:
|
420 |
+
"""
|
421 |
+
Count records in a table.
|
422 |
+
|
423 |
+
Args:
|
424 |
+
table (str): The table to count records in.
|
425 |
+
condition (str, optional): The condition for the count. Defaults to None.
|
426 |
+
condition_params (List[Dict[str, Any]], optional): The parameters for the condition. Defaults to None.
|
427 |
+
operation_id (str, optional): A unique identifier for the operation. Defaults to None.
|
428 |
+
|
429 |
+
Returns:
|
430 |
+
int: The number of records.
|
431 |
+
"""
|
432 |
+
if operation_id is None:
|
433 |
+
operation_id = f"count_{int(time.time())}"
|
434 |
+
|
435 |
+
logger.info(f"[{operation_id}] Counting records in {table}")
|
436 |
+
|
437 |
+
# Prepare the SQL
|
438 |
+
sql = f"SELECT COUNT(*) FROM {table}"
|
439 |
+
|
440 |
+
if condition:
|
441 |
+
sql += f" WHERE {condition}"
|
442 |
+
|
443 |
+
try:
|
444 |
+
result = execute_query(sql, condition_params, operation_id)
|
445 |
+
|
446 |
+
# Check for errors
|
447 |
+
if "results" in result and len(result["results"]) > 0:
|
448 |
+
if result["results"][0]["type"] == "error":
|
449 |
+
error_msg = result["results"][0]["error"]["message"]
|
450 |
+
logger.error(f"[{operation_id}] Count error: {error_msg}")
|
451 |
+
return 0
|
452 |
+
|
453 |
+
# Extract the count
|
454 |
+
response = result["results"][0]["response"]
|
455 |
+
if "result" in response and "rows" in response["result"] and response["result"]["rows"]:
|
456 |
+
count = int(response["result"]["rows"][0][0]["value"])
|
457 |
+
logger.info(f"[{operation_id}] Counted {count} records")
|
458 |
+
return count
|
459 |
+
|
460 |
+
logger.warning(f"[{operation_id}] Count failed")
|
461 |
+
return 0
|
462 |
+
except Exception as e:
|
463 |
+
logger.error(f"[{operation_id}] Error counting records: {str(e)}")
|
464 |
+
return 0
|
465 |
+
|
466 |
+
def record_exists(table: str, condition: str, condition_params: List[Dict[str, Any]], operation_id: str = None) -> bool:
|
467 |
+
"""
|
468 |
+
Check if a record exists in a table.
|
469 |
+
|
470 |
+
Args:
|
471 |
+
table (str): The table to check.
|
472 |
+
condition (str): The condition for the check.
|
473 |
+
condition_params (List[Dict[str, Any]]): The parameters for the condition.
|
474 |
+
operation_id (str, optional): A unique identifier for the operation. Defaults to None.
|
475 |
+
|
476 |
+
Returns:
|
477 |
+
bool: True if the record exists, False otherwise.
|
478 |
+
"""
|
479 |
+
count = count_records(table, condition, condition_params, operation_id)
|
480 |
+
return count > 0
|
481 |
+
|
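count_records and record_exists share the same SELECT COUNT(*) path; a sketch of the duplicate-email check they enable (the address is invented):

from app.utils import db_http

taken = db_http.record_exists(
    "users",
    "email = ?",
    [{"type": "text", "value": "someone@example.com"}],  # illustrative email
)
total_users = db_http.count_records("users")
print(f"email taken: {taken}, users in total: {total_users}")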
482 |
+
def create_table_if_not_exists(table: str, schema: str, operation_id: str = None) -> bool:
|
483 |
+
"""
|
484 |
+
Create a table if it doesn't exist.
|
485 |
+
|
486 |
+
Args:
|
487 |
+
table (str): The table to create.
|
488 |
+
schema (str): The schema for the table.
|
489 |
+
operation_id (str, optional): A unique identifier for the operation. Defaults to None.
|
490 |
+
|
491 |
+
Returns:
|
492 |
+
bool: True if the table was created or already exists, False otherwise.
|
493 |
+
"""
|
494 |
+
if operation_id is None:
|
495 |
+
operation_id = f"create_table_{int(time.time())}"
|
496 |
+
|
497 |
+
logger.info(f"[{operation_id}] Creating table {table} if not exists")
|
498 |
+
|
499 |
+
# Prepare the SQL
|
500 |
+
sql = f"CREATE TABLE IF NOT EXISTS {table} ({schema})"
|
501 |
+
|
502 |
+
try:
|
503 |
+
result = execute_query(sql, operation_id=operation_id)
|
504 |
+
|
505 |
+
# Check for errors
|
506 |
+
if "results" in result and len(result["results"]) > 0:
|
507 |
+
if result["results"][0]["type"] == "error":
|
508 |
+
error_msg = result["results"][0]["error"]["message"]
|
509 |
+
logger.error(f"[{operation_id}] Create table error: {error_msg}")
|
510 |
+
return False
|
511 |
+
|
512 |
+
logger.info(f"[{operation_id}] Table {table} created or already exists")
|
513 |
+
return True
|
514 |
+
|
515 |
+
return False
|
516 |
+
except Exception as e:
|
517 |
+
logger.error(f"[{operation_id}] Error creating table: {str(e)}")
|
518 |
+
return False
|
519 |
+
|
520 |
+
def execute_transaction(queries: List[Tuple[str, List[Dict[str, Any]]]], operation_id: str = None) -> bool:
|
521 |
+
"""
|
522 |
+
Execute a transaction with multiple queries.
|
523 |
+
|
524 |
+
Args:
|
525 |
+
queries (List[Tuple[str, List[Dict[str, Any]]]]): The queries to execute.
|
526 |
+
operation_id (str, optional): A unique identifier for the operation. Defaults to None.
|
527 |
+
|
528 |
+
Returns:
|
529 |
+
bool: True if the transaction succeeded, False otherwise.
|
530 |
+
"""
|
531 |
+
if operation_id is None:
|
532 |
+
operation_id = f"transaction_{int(time.time())}"
|
533 |
+
|
534 |
+
logger.info(f"[{operation_id}] Executing transaction with {len(queries)} queries")
|
535 |
+
|
536 |
+
# Prepare the pipeline
|
537 |
+
requests = [{"type": "execute", "stmt": {"sql": "BEGIN"}}]
|
538 |
+
|
539 |
+
for sql, params in queries:
|
540 |
+
pipeline_requests.append({
|
541 |
+
"type": "execute",
|
542 |
+
"stmt": {
|
543 |
+
"sql": sql,
|
544 |
+
"args": params or []
|
545 |
+
}
|
546 |
+
})
|
547 |
+
|
548 |
+
requests.append({"type": "execute", "stmt": {"sql": "COMMIT"}})
|
549 |
+
|
550 |
+
try:
|
551 |
+
result = execute_pipeline(pipeline_requests, operation_id)
|
552 |
+
|
553 |
+
# Check for errors
|
554 |
+
for i, res in enumerate(result.get("results", [])):
|
555 |
+
if res.get("type") == "error":
|
556 |
+
error_msg = res.get("error", {}).get("message", "Unknown error")
|
557 |
+
logger.error(f"[{operation_id}] Transaction error in query {i}: {error_msg}")
|
558 |
+
|
559 |
+
# Try to rollback
|
560 |
+
try:
|
561 |
+
execute_query("ROLLBACK", operation_id=f"{operation_id}_rollback")
|
562 |
+
except Exception:
|
563 |
+
pass
|
564 |
+
|
565 |
+
return False
|
566 |
+
|
567 |
+
logger.info(f"[{operation_id}] Transaction executed successfully")
|
568 |
+
return True
|
569 |
+
except Exception as e:
|
570 |
+
logger.error(f"[{operation_id}] Error executing transaction: {str(e)}")
|
571 |
+
|
572 |
+
# Try to rollback
|
573 |
+
try:
|
574 |
+
execute_query("ROLLBACK", operation_id=f"{operation_id}_rollback")
|
575 |
+
except Exception:
|
576 |
+
pass
|
577 |
+
|
578 |
+
return False
|
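An execute_transaction sketch: two statements wrapped in BEGIN/COMMIT, with a ROLLBACK attempted if either one fails. The values are illustrative only.

from app.utils import db_http

ok = db_http.execute_transaction(
    [
        (
            "INSERT INTO projects (owner_id, title) VALUES (?, ?)",
            [
                {"type": "integer", "value": "7"},         # illustrative owner id
                {"type": "text", "value": "Field survey"},
            ],
        ),
        (
            "UPDATE users SET last_login = CURRENT_TIMESTAMP WHERE id = ?",
            [{"type": "integer", "value": "7"}],
        ),
    ],
    operation_id="example_transaction",
)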
main.py
CHANGED
@@ -12,6 +12,10 @@ import uvicorn
|
|
12 |
from dotenv import load_dotenv
|
13 |
from passlib.context import CryptContext
|
14 |
from pydantic import BaseModel, EmailStr
|
15 |
|
16 |
# Configure logging
|
17 |
logging.basicConfig(
|
@@ -461,6 +465,71 @@ async def startup_db_client():
|
|
461 |
# This function is kept for reference but is not used anymore
|
462 |
pass
|
463 |
|
464 |
# Initialize database connection
|
465 |
connected = False
|
466 |
|
@@ -867,6 +936,13 @@ async def startup_db_client():
|
|
867 |
|
868 |
# Create super user if it doesn't exist using HTTP API
|
869 |
create_super_user_http()
|
870 |
except Exception as e:
|
871 |
error_msg = f"Failed to create database tables: {str(e)}"
|
872 |
logger.error(error_msg)
|
@@ -874,6 +950,13 @@ async def startup_db_client():
|
|
874 |
# This allows the server to start and use existing tables if they exist
|
875 |
logger.warning("Continuing server startup despite table creation errors")
|
876 |
|
877 |
@app.on_event("shutdown")
|
878 |
async def shutdown_db_client():
|
879 |
logger.info("Shutting down database connection")
|
@@ -2091,162 +2174,203 @@ async def test_db_http():
|
|
2091 |
results["success_count"] += 1
|
2092 |
results["total_count"] += 1
|
2093 |
|
[lines 2094-2249 removed; diff rendering garbled] The old body of test_db_http() built raw /v2/pipeline payloads by hand with direct requests.post calls: creating the users table, inserting a test user and reading back last_insert_rowid(), looking the user up again, and listing all users. Those inline steps are superseded by the db_http utility calls added in the new version of this hunk below.
2250 |
|
2251 |
if __name__ == "__main__":
|
2252 |
port = int(os.getenv("PORT", 7860))
|
|
|
12 |
from dotenv import load_dotenv
|
13 |
from passlib.context import CryptContext
|
14 |
from pydantic import BaseModel, EmailStr
|
15 |
+
from typing import Dict, Any, List, Optional
|
16 |
+
|
17 |
+
# Import the HTTP API database utility
|
18 |
+
from app.utils import db_http
|
19 |
|
20 |
# Configure logging
|
21 |
logging.basicConfig(
|
|
|
465 |
# This function is kept for reference but is not used anymore
|
466 |
pass
|
467 |
|
468 |
+
# Function to create database tables using HTTP API
|
469 |
+
def create_tables_http():
|
470 |
+
"""
|
471 |
+
Create database tables if they don't exist using the HTTP API.
|
472 |
+
"""
|
473 |
+
operation_id = f"create_tables_http_{int(time.time())}"
|
474 |
+
logger.info(f"[{operation_id}] Creating database tables using HTTP API")
|
475 |
+
|
476 |
+
try:
|
477 |
+
# Import the HTTP API utility
|
478 |
+
from app.utils import db_http
|
479 |
+
|
480 |
+
# Create users table
|
481 |
+
logger.info(f"[{operation_id}] Creating users table")
|
482 |
+
db_http.create_table_if_not_exists(
|
483 |
+
"users",
|
484 |
+
"""
|
485 |
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
486 |
+
email TEXT NOT NULL UNIQUE,
|
487 |
+
hashed_password TEXT NOT NULL CHECK(LENGTH(hashed_password) = 97),
|
488 |
+
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
489 |
+
last_login DATETIME,
|
490 |
+
is_admin BOOLEAN DEFAULT 0
|
491 |
+
""",
|
492 |
+
operation_id=f"{operation_id}_users"
|
493 |
+
)
|
494 |
+
|
495 |
+
# Create projects table
|
496 |
+
logger.info(f"[{operation_id}] Creating projects table")
|
497 |
+
db_http.create_table_if_not_exists(
|
498 |
+
"projects",
|
499 |
+
"""
|
500 |
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
501 |
+
owner_id INTEGER NOT NULL,
|
502 |
+
title TEXT NOT NULL,
|
503 |
+
description TEXT,
|
504 |
+
geojson TEXT,
|
505 |
+
storage_bucket TEXT,
|
506 |
+
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
507 |
+
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
508 |
+
FOREIGN KEY (owner_id) REFERENCES users (id)
|
509 |
+
""",
|
510 |
+
operation_id=f"{operation_id}_projects"
|
511 |
+
)
|
512 |
+
|
513 |
+
# Create journals table
|
514 |
+
logger.info(f"[{operation_id}] Creating journals table")
|
515 |
+
db_http.create_table_if_not_exists(
|
516 |
+
"journals",
|
517 |
+
"""
|
518 |
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
519 |
+
project_id INTEGER NOT NULL,
|
520 |
+
title TEXT NOT NULL,
|
521 |
+
content TEXT,
|
522 |
+
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
523 |
+
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
524 |
+
FOREIGN KEY (project_id) REFERENCES projects (id)
|
525 |
+
""",
|
526 |
+
operation_id=f"{operation_id}_journals"
|
527 |
+
)
|
528 |
+
|
529 |
+
logger.info(f"[{operation_id}] All database tables created successfully")
|
530 |
+
except Exception as e:
|
531 |
+
logger.error(f"[{operation_id}] Error creating database tables: {str(e)}")
|
532 |
+
|
533 |
# Initialize database connection
|
534 |
connected = False
|
535 |
|
|
|
936 |
|
937 |
# Create super user if it doesn't exist using HTTP API
|
938 |
create_super_user_http()
|
939 |
+
|
940 |
+
# Also create tables using HTTP API for better reliability
|
941 |
+
try:
|
942 |
+
logger.info("Creating tables using HTTP API for better reliability")
|
943 |
+
create_tables_http()
|
944 |
+
except Exception as e:
|
945 |
+
logger.error(f"Error creating tables with HTTP API: {str(e)}")
|
946 |
except Exception as e:
|
947 |
error_msg = f"Failed to create database tables: {str(e)}"
|
948 |
logger.error(error_msg)
|
|
|
950 |
# This allows the server to start and use existing tables if they exist
|
951 |
logger.warning("Continuing server startup despite table creation errors")
|
952 |
|
953 |
+
# Try to create tables using HTTP API as a fallback
|
954 |
+
try:
|
955 |
+
logger.info("Trying to create tables using HTTP API as a fallback")
|
956 |
+
create_tables_http()
|
957 |
+
except Exception as e:
|
958 |
+
logger.error(f"Error creating tables with HTTP API fallback: {str(e)}")
|
959 |
+
|
960 |
@app.on_event("shutdown")
|
961 |
async def shutdown_db_client():
|
962 |
logger.info("Shutting down database connection")
|
|
|
2174 |
results["success_count"] += 1
|
2175 |
results["total_count"] += 1
|
2176 |
|
2177 |
+
except Exception as e:
|
2178 |
+
results["error"] = str(e)
|
2179 |
|
2180 |
+
# Add success rate
|
2181 |
+
results["success_rate"] = f"{results['success_count']}/{results['total_count']}"
|
2182 |
|
2183 |
+
return results
|
|
2184 |
|
2185 |
+
@app.get("/test-db-http-util", tags=["General"])
|
2186 |
+
async def test_db_http_util():
|
2187 |
+
"""
|
2188 |
+
Test the database connection and operations using the HTTP API utility.
|
2189 |
+
This endpoint is for testing purposes only.
|
2190 |
|
2191 |
+
Returns:
|
2192 |
+
dict: Information about the database tests.
|
2193 |
+
"""
|
2194 |
+
logger.info("Test database HTTP API utility endpoint accessed")
|
2195 |
|
2196 |
+
try:
|
2197 |
+
# Import the HTTP API utility
|
2198 |
+
from app.utils import db_http
|
2199 |
|
2200 |
+
operation_id = f"test_db_http_util_{int(time.time())}"
|
2201 |
+
logger.info(f"[{operation_id}] Starting HTTP API utility test")
|
|
|
2202 |
|
2203 |
+
results = {
|
2204 |
+
"connection_type": "http-api-util",
|
2205 |
+
"operations": []
|
2206 |
+
}
|
2207 |
|
2208 |
+
# Test 1: Simple SELECT
|
2209 |
+
logger.info(f"[{operation_id}] Test 1: Simple SELECT")
|
2210 |
+
result = db_http.execute_query("SELECT 1 as test", operation_id=f"{operation_id}_1")
|
2211 |
+
results["operations"].append({
|
2212 |
+
"name": "Simple SELECT",
|
2213 |
"success": True,
|
2214 |
+
"result": str(result)
|
|
|
2215 |
})
|
|
|
|
|
2216 |
|
2217 |
+
# Test 2: Create a temporary table
|
2218 |
+
logger.info(f"[{operation_id}] Test 2: Create a temporary table")
|
2219 |
+
db_http.create_table_if_not_exists(
|
2220 |
+
"test_table_http_util",
|
2221 |
+
"""
|
2222 |
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
2223 |
+
value TEXT NOT NULL,
|
2224 |
+
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
2225 |
+
""",
|
2226 |
+
operation_id=f"{operation_id}_2"
|
2227 |
+
)
|
2228 |
+
results["operations"].append({
|
2229 |
+
"name": "Create temporary table",
|
2230 |
+
"success": True
|
2231 |
+
})
|
2232 |
|
2233 |
+
# Test 3: Insert into the temporary table
|
2234 |
+
logger.info(f"[{operation_id}] Test 3: Insert into the temporary table")
|
2235 |
+
test_value = f"test_value_http_util_{int(time.time())}"
|
2236 |
|
2237 |
+
insert_id = db_http.insert_record(
|
2238 |
+
"test_table_http_util",
|
2239 |
+
{"value": test_value},
|
2240 |
+
operation_id=f"{operation_id}_3"
|
2241 |
+
)
|
|
|
2242 |
|
2243 |
+
results["operations"].append({
|
2244 |
+
"name": "Insert into temporary table",
|
2245 |
+
"success": insert_id is not None,
|
2246 |
+
"insert_id": insert_id
|
2247 |
})
|
2248 |
|
2249 |
+
# Test 4: Select from the temporary table
|
2250 |
+
logger.info(f"[{operation_id}] Test 4: Select from the temporary table")
|
2251 |
+
records = db_http.select_records(
|
2252 |
+
"test_table_http_util",
|
2253 |
+
condition="value = ?",
|
2254 |
+
condition_params=[{"type": "text", "value": test_value}],
|
2255 |
+
limit=1,
|
2256 |
+
operation_id=f"{operation_id}_4"
|
2257 |
+
)
|
2258 |
|
2259 |
+
results["operations"].append({
|
2260 |
+
"name": "Select from temporary table",
|
2261 |
+
"success": len(records) > 0,
|
2262 |
+
"result": str(records[0]) if records else None
|
2263 |
+
})
|
2264 |
|
2265 |
+
# Test 5: Update the record
|
2266 |
+
logger.info(f"[{operation_id}] Test 5: Update the record")
|
2267 |
+
updated_value = f"{test_value}_updated"
|
2268 |
|
2269 |
+
success = db_http.update_record(
|
2270 |
+
"test_table_http_util",
|
2271 |
+
{"value": updated_value},
|
2272 |
+
"id = ?",
|
2273 |
+
[{"type": "integer", "value": str(insert_id)}],
|
2274 |
+
operation_id=f"{operation_id}_5"
|
2275 |
+
)
|
2276 |
|
2277 |
+
results["operations"].append({
|
2278 |
+
"name": "Update record",
|
2279 |
+
"success": success
|
2280 |
+
})
|
2281 |
|
2282 |
+
# Test 6: Verify the update
|
2283 |
+
logger.info(f"[{operation_id}] Test 6: Verify the update")
|
2284 |
+
updated_records = db_http.select_records(
|
2285 |
+
"test_table_http_util",
|
2286 |
+
condition="id = ?",
|
2287 |
+
condition_params=[{"type": "integer", "value": str(insert_id)}],
|
2288 |
+
limit=1,
|
2289 |
+
operation_id=f"{operation_id}_6"
|
2290 |
+
)
|
2291 |
+
|
2292 |
+
results["operations"].append({
|
2293 |
+
"name": "Verify update",
|
2294 |
+
"success": len(updated_records) > 0 and updated_records[0].get("value") == updated_value,
|
2295 |
+
"result": str(updated_records[0]) if updated_records else None
|
2296 |
+
})
|
2297 |
+
|
2298 |
+
# Test 7: Count records
|
2299 |
+
logger.info(f"[{operation_id}] Test 7: Count records")
|
2300 |
+
count = db_http.count_records("test_table_http_util", operation_id=f"{operation_id}_7")
|
2301 |
+
|
2302 |
+
results["operations"].append({
|
2303 |
+
"name": "Count records",
|
2304 |
+
"success": count > 0,
|
2305 |
+
"count": count
|
2306 |
+
})
|
2307 |
+
|
2308 |
+
# Test 8: List all records
|
2309 |
+
logger.info(f"[{operation_id}] Test 8: List all records")
|
2310 |
+
all_records = db_http.select_records(
|
2311 |
+
"test_table_http_util",
|
2312 |
+
limit=10,
|
2313 |
+
operation_id=f"{operation_id}_8"
|
2314 |
+
)
|
2315 |
+
|
2316 |
+
results["operations"].append({
|
2317 |
+
"name": "List all records",
|
2318 |
"success": True,
|
2319 |
+
"count": len(all_records),
|
2320 |
+
"records": all_records
|
2321 |
})
|
2322 |
|
2323 |
+
# Test 9: Create a test user
|
2324 |
+
logger.info(f"[{operation_id}] Test 9: Create a test user")
|
2325 |
|
2326 |
+
# Create password hashing context
|
2327 |
+
from app.api.routes.auth_router import get_password_hash
|
2328 |
+
|
2329 |
+
test_user_email = f"test_user_http_util_{int(time.time())}@example.com"
|
2330 |
+
test_user_password = "TestPassword123!"
|
2331 |
+
hashed_password = get_password_hash(test_user_password)
|
2332 |
+
|
2333 |
+
user_id = db_http.insert_record(
|
2334 |
+
"users",
|
2335 |
+
{
|
2336 |
+
"email": test_user_email,
|
2337 |
+
"hashed_password": hashed_password,
|
2338 |
+
"is_admin": 0
|
2339 |
+
},
|
2340 |
+
operation_id=f"{operation_id}_9"
|
2341 |
+
)
|
2342 |
|
2343 |
+
results["operations"].append({
|
2344 |
+
"name": "Create test user",
|
2345 |
+
"success": user_id is not None,
|
2346 |
+
"user_id": user_id,
|
2347 |
+
"email": test_user_email
|
2348 |
+
})
|
2349 |
+
|
2350 |
+
# Test 10: Verify the user was created
|
2351 |
+
logger.info(f"[{operation_id}] Test 10: Verify the user was created")
|
2352 |
+
users = db_http.select_records(
|
2353 |
+
"users",
|
2354 |
+
condition="email = ?",
|
2355 |
+
condition_params=[{"type": "text", "value": test_user_email}],
|
2356 |
+
limit=1,
|
2357 |
+
operation_id=f"{operation_id}_10"
|
2358 |
+
)
|
2359 |
+
|
2360 |
+
results["operations"].append({
|
2361 |
+
"name": "Verify test user",
|
2362 |
+
"success": len(users) > 0,
|
2363 |
+
"result": str(users[0]) if users else None
|
2364 |
+
})
|
2365 |
+
|
2366 |
+
logger.info(f"[{operation_id}] HTTP API utility test completed successfully")
|
2367 |
+
return results
|
2368 |
+
except Exception as e:
|
2369 |
+
logger.error(f"Error during HTTP API utility test: {str(e)}")
|
2370 |
+
return {
|
2371 |
+
"error": str(e),
|
2372 |
+
"operations": []
|
2373 |
+
}
|
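Once the server is running, the new diagnostics route can be smoke-tested with a plain GET; host and port are assumptions taken from the __main__ block below.

# Assumes the app is listening on localhost:7860.
import requests

report = requests.get("http://localhost:7860/test-db-http-util").json()
for op in report.get("operations", []):
    print(op["name"], "ok" if op.get("success") else "FAILED")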
2374 |
|
2375 |
if __name__ == "__main__":
|
2376 |
port = int(os.getenv("PORT", 7860))
|