3v324v23 committed on
Commit 1a7090c · 1 Parent(s): 6517c54
Files changed (1)
  1. app.py +148 -24
app.py CHANGED
@@ -32,7 +32,7 @@ task_history = []
  # Lock for shared resources
  lock = threading.Lock()
  # Number of worker threads - set to 1 to process one task at a time
- worker_threads = 1 # 只处理一个任务,避免资源竞争
+ worker_threads = 1
  # Flag for running background threads
  running = True
  # Mapping from task type to processing time
@@ -41,6 +41,16 @@ task_type_times = {}
  processing_count = 0
  # Available CPU cores for task processing
  available_cores = multiprocessing.cpu_count()
+ # Task ID counter for debugging
+ task_counter = 0
+
+ # Enable logging
+ DEBUG_MODE = True
+
+ def debug_log(message):
+ """Log debug messages if debug mode is enabled"""
+ if DEBUG_MODE:
+ print(f"[DEBUG] {datetime.now().strftime('%H:%M:%S')} - {message}")

  def queue_processor():
  """Process tasks in the queue"""
@@ -54,18 +64,26 @@ def queue_processor():
  # Already processing a task, wait and try again
  time.sleep(0.5)
  continue
-
+
+ # Check queue size before attempting to get a task
+ queue_size = task_queue.qsize()
+ if queue_size > 0:
+ debug_log(f"Queue processor found {queue_size} tasks waiting")
+
  # Get a task from the queue with small timeout to prevent blocking
  task_id, input_data, request_time = task_queue.get(timeout=0.1)
+ debug_log(f"Processing task {task_id}")

  # Increment processing count to track active tasks
  with lock:
  processing_count += 1
+ debug_log(f"Incremented processing count to {processing_count}")

  # Update task status
  if task_id in task_status:
  task_status[task_id]['status'] = 'processing'
  task_status[task_id]['start_time'] = time.time()
+ debug_log(f"Updated existing task {task_id} to processing state")
  else:
  # Create task status entry if it doesn't exist
  task_status[task_id] = {
@@ -73,6 +91,7 @@ def queue_processor():
  'queued_time': request_time,
  'start_time': time.time()
  }
+ debug_log(f"Created new task status entry for {task_id}")

  if isinstance(input_data, list) and len(input_data) > 0:
  sample_task = input_data[0]
@@ -87,7 +106,9 @@ def queue_processor():
  'complexity': task_complexity
  }

+ debug_log(f"Starting evaluation for task {task_id}")
  result = evaluate(input_data)
+ debug_log(f"Finished evaluation for task {task_id}")

  end_time = time.time()
  process_time = end_time - task_status[task_id]['start_time']
@@ -95,12 +116,14 @@ def queue_processor():
  with lock:
  # Decrease processing count now that we're done
  processing_count -= 1
+ debug_log(f"Decremented processing count to {processing_count}")

  # Update task status
  task_status[task_id]['status'] = 'completed'
  task_status[task_id]['result'] = result
  task_status[task_id]['end_time'] = end_time
  task_status[task_id]['process_time'] = process_time
+ debug_log(f"Updated task {task_id} to completed state")

  if 'estimated_factors' in task_status[task_id]:
  factors = task_status[task_id]['estimated_factors']
@@ -124,25 +147,31 @@ def queue_processor():
  task_history.pop(0)

  task_queue.task_done()
+ debug_log(f"Task {task_id} completed and marked as done")

  except queue.Empty:
  continue
  except Exception as e:
+ debug_log(f"Error in queue processor: {str(e)}")
  if 'task_id' in locals():
+ debug_log(f"Error occurred while processing task {task_id}")
  with lock:
  # Decrease processing count on error
  processing_count -= 1
+ debug_log(f"Decremented processing count to {processing_count} due to error")

  if task_id in task_status:
  task_status[task_id]['status'] = 'error'
  task_status[task_id]['error'] = str(e)
  task_status[task_id]['end_time'] = time.time()
+ debug_log(f"Updated task {task_id} to error state")
  else:
  task_status[task_id] = {
  'status': 'error',
  'error': str(e),
  'end_time': time.time()
  }
+ debug_log(f"Created new error entry for task {task_id}")
  task_queue.task_done()

  def _estimate_task_complexity(tasks):
@@ -254,9 +283,12 @@ def evaluate_code(code, language):

  def synchronous_evaluate(input_data):
  """Synchronously evaluate code, compatible with original interface"""
+ debug_log(f"Received synchronous evaluation request")
+
  # Create a task and queue it
  task_info = enqueue_task(input_data)
  task_id = task_info['task_id']
+ debug_log(f"Created task {task_id} for synchronous evaluation")

  # Wait for task to complete
  while True:
@@ -264,6 +296,7 @@ def synchronous_evaluate(input_data):
  if task_id in task_status:
  status = task_status[task_id]['status']
  if status == 'completed':
+ debug_log(f"Task {task_id} completed, returning result")
  result = task_status[task_id]['result']
  # Keep the result in status for a short time to ensure it shows in history
  if 'end_time' not in task_status[task_id]:
@@ -272,6 +305,7 @@ def synchronous_evaluate(input_data):
  task_status.pop(task_id, None)
  return result
  elif status == 'error':
+ debug_log(f"Task {task_id} failed with error")
  error = task_status[task_id].get('error', 'Unknown error')
  # Keep the error in status for a short time to ensure it shows in history
  if 'end_time' not in task_status[task_id]:
@@ -279,6 +313,8 @@ def synchronous_evaluate(input_data):
  elif time.time() - task_status[task_id]['end_time'] > 5:
  task_status.pop(task_id, None)
  return {"status": "Exception", "error": error}
+ else:
+ debug_log(f"Task {task_id} still in status: {status}")

  time.sleep(0.1)

@@ -298,6 +334,8 @@ def _get_estimated_time_for_task(language, complexity):

  def enqueue_task(input_data):
  """Add task to queue"""
+ global task_counter
+
  if isinstance(input_data, list) and len(input_data) > 0:
  sample_task = input_data[0]
  language = sample_task.get('language', 'unknown') if isinstance(sample_task, dict) else 'unknown'
@@ -311,15 +349,22 @@ def enqueue_task(input_data):
  estimated_time_per_task = _get_estimated_time_for_task(language, task_complexity)
  estimated_total_time = estimated_time_per_task * task_size

- task_id = str(uuid.uuid4())
+ task_counter += 1
+ task_id = f"task_{task_counter}_{str(uuid.uuid4())}"
  request_time = time.time()

+ debug_log(f"Creating new task: {task_id}")
+
+ # Get current queue status before adding to task_status
+ current_queue_size = task_queue.qsize()
+ debug_log(f"Current queue size before adding: {current_queue_size}")
+
  # Add to task_status with 'queued' status first
  with lock:
  task_status[task_id] = {
  'status': 'queued',
  'queued_time': request_time,
- 'queue_position': task_queue.qsize() + 1,
+ 'queue_position': current_queue_size + 1,
  'estimated_factors': {
  'language': language,
  'size': task_size,
@@ -327,13 +372,25 @@ def enqueue_task(input_data):
  },
  'estimated_time': estimated_total_time
  }
+ debug_log(f"Added task {task_id} to task_status with queue position {current_queue_size + 1}")

  # Get queue info for wait time estimation
  queue_info = get_queue_status()
  est_wait = queue_info['estimated_wait']
+ debug_log(f"Estimated wait time for task {task_id}: {est_wait} seconds")

  # Add to the task queue - this must be done AFTER adding to task_status
  task_queue.put((task_id, input_data, request_time))
+ debug_log(f"Added task {task_id} to task_queue")
+
+ # Double-check queue status after adding
+ new_queue_size = task_queue.qsize()
+ debug_log(f"New queue size after adding: {new_queue_size}")
+
+ # Count queued tasks in task_status
+ with lock:
+ queued_count = sum(1 for t in task_status.values() if t['status'] == 'queued')
+ debug_log(f"Total tasks with 'queued' status: {queued_count}")

  return {
  'task_id': task_id,
@@ -366,6 +423,12 @@ def get_queue_status():
  active_tasks = processing_count
  waiting_tasks = len(queued_tasks)

+ debug_log(f"Queue status: size={queue_size}, active={active_tasks}, waiting={waiting_tasks}")
+ debug_log(f"Queue status details: {len(queued_tasks)} queued tasks found in task_status")
+ if queued_tasks:
+ task_ids = [t.get('task_id', 'unknown') for t in queued_tasks if 'task_id' in t]
+ debug_log(f"Queued task IDs: {task_ids}")
+
  remaining_processing_time = 0
  for task in processing_tasks:
  if 'start_time' in task and 'estimated_time' in task:
@@ -433,6 +496,18 @@ def ui_get_queue_info():
  """Get queue info for UI"""
  queue_info = get_queue_status()

+ # List queued tasks with details
+ queued_tasks_html = ""
+ with lock:
+ queued_tasks = [t for t in task_status.values() if t['status'] == 'queued']
+ if queued_tasks:
+ queued_tasks_html = "<div class='queued-tasks'><h4>Tasks in Queue:</h4><ul>"
+ for idx, task in enumerate(sorted(queued_tasks, key=lambda x: x.get('queued_time', 0))):
+ task_id = next((k for k, v in task_status.items() if v == task), "unknown")
+ queued_time = datetime.fromtimestamp(task.get('queued_time', 0)).strftime('%H:%M:%S')
+ queued_tasks_html += f"<li>Task {task_id[:8]}... - Queued at {queued_time} - Position {idx+1}</li>"
+ queued_tasks_html += "</ul></div>"
+
  tasks_html = ""
  for task in reversed(queue_info['recent_tasks']):
  tasks_html += f"""
@@ -450,50 +525,60 @@ def ui_get_queue_info():
  </tr>
  """

- # 增加更多详细信息到UI
+ # Add more detailed queue information
  queue_details = ""
  if queue_info['waiting_tasks'] > 0:
- queue_details = f"<p>当前有 {queue_info['waiting_tasks']} 个任务在等待队列中,预计等待时间:{format_time(queue_info['estimated_wait'])}</p>"
+ queue_details = f"""
+ <div class="alert alert-info">
+ <p><strong>Currently {queue_info['waiting_tasks']} tasks in queue</strong></p>
+ <p>Estimated wait time: {format_time(queue_info['estimated_wait'])}</p>
+ {queued_tasks_html}
+ </div>
+ """

  processing_details = ""
  if queue_info['active_tasks'] > 0:
- processing_details = f"<p>当前有 {queue_info['active_tasks']} 个任务正在处理中</p>"
+ processing_details = f"""
+ <div class="alert alert-warning">
+ <p><strong>Currently {queue_info['active_tasks']} tasks being processed</strong></p>
+ </div>
+ """

  return f"""
  <div class="dashboard">
  <div class="queue-info-card main-card">
- <h3 class="card-title">队列状态监控</h3>
+ <h3 class="card-title">Queue Status Monitor</h3>
  <div class="queue-stats">
  <div class="stat-item">
  <div class="stat-value">{queue_info['waiting_tasks']}</div>
- <div class="stat-label">等待中</div>
+ <div class="stat-label">Waiting</div>
  </div>
  <div class="stat-item">
  <div class="stat-value">{queue_info['active_tasks']}</div>
- <div class="stat-label">处理中</div>
+ <div class="stat-label">Processing</div>
  </div>
  <div class="stat-item">
  <div class="stat-value">{queue_info['worker_threads']}</div>
- <div class="stat-label">工作线程</div>
+ <div class="stat-label">Worker Threads</div>
  </div>
  </div>

  <div class="wait-time">
- <p><b>预计等待时间:</b> {format_time(queue_info['estimated_wait'])}</p>
+ <p><b>Estimated Wait Time:</b> {format_time(queue_info['estimated_wait'])}</p>
  {queue_details}
  {processing_details}
- <p class="last-update"><small>最后更新时间: {datetime.now().strftime('%H:%M:%S')}</small></p>
+ <p class="last-update"><small>Last update: {datetime.now().strftime('%H:%M:%S')}</small></p>
  </div>
  </div>

  <div class="queue-info-card history-card">
- <h3 class="card-title">最近处理的任务</h3>
+ <h3 class="card-title">Recently Processed Tasks</h3>
  <table class="recent-tasks">
  <thead>
  <tr>
- <th>任务ID</th>
- <th>请求时间</th>
- <th>处理时间</th>
+ <th>Task ID</th>
+ <th>Request Time</th>
+ <th>Processing Time</th>
  </tr>
  </thead>
  <tbody>
@@ -666,6 +751,41 @@ button.primary {
  button.primary:hover {
  background-color: #3367d6;
  }
+
+ .alert {
+ padding: 12px;
+ margin: 10px 0;
+ border-radius: 6px;
+ }
+
+ .alert-info {
+ background-color: #d1ecf1;
+ color: #0c5460;
+ border: 1px solid #bee5eb;
+ }
+
+ .alert-warning {
+ background-color: #fff3cd;
+ color: #856404;
+ border: 1px solid #ffeeba;
+ }
+
+ .queued-tasks {
+ text-align: left;
+ margin: 10px 0;
+ padding: 8px;
+ background: rgba(255, 255, 255, 0.5);
+ border-radius: 4px;
+ }
+
+ .queued-tasks ul {
+ margin: 5px 0;
+ padding-left: 20px;
+ }
+
+ .queued-tasks li {
+ margin-bottom: 3px;
+ }
  """

  # Initialize and launch worker threads
@@ -673,14 +793,14 @@ launch_workers()

  # Create Gradio interface
  with gr.Blocks(css=custom_css) as demo:
- gr.Markdown("# 代码评估服务")
- gr.Markdown("支持多种编程语言的代码评估服务,使用队列机制处理请求")
+ gr.Markdown("# Code Evaluation Service")
+ gr.Markdown("Code evaluation service supporting multiple programming languages, using queue mechanism to process requests")

  with gr.Row():
  with gr.Column(scale=3):
  # Queue status info card
  queue_info_html = gr.HTML()
- refresh_queue_btn = gr.Button("刷新队列状态", variant="primary")
+ refresh_queue_btn = gr.Button("Refresh Queue Status", variant="primary")

  # Hidden API interface components
  with gr.Row(visible=False):
@@ -697,14 +817,16 @@ with gr.Blocks(css=custom_css) as demo:
  def update_queue_info():
  return ui_get_queue_info()

- # Update queue info periodically
- demo.load(update_queue_info, None, queue_info_html, every=1)
+ # Update queue info more frequently
+ demo.load(update_queue_info, None, queue_info_html, every=0.5)

  # Refresh button event
  refresh_queue_btn.click(update_queue_info, None, queue_info_html)

+ # Force sync when handling API requests to prevent gradio's queue from interfering
+ demo.queue(concurrency_count=1, max_size=100)
+
  # Add evaluation endpoint compatible with original interface
- demo.queue()
  evaluate_endpoint = demo.load(fn=synchronous_evaluate, inputs=api_input, outputs=api_output, api_name="evaluate")

  # Add async evaluation endpoint
@@ -714,8 +836,10 @@ with gr.Blocks(css=custom_css) as demo:
  status_endpoint = demo.load(fn=check_status, inputs=status_check_input, outputs=status_check_output, api_name="status")

  if __name__ == "__main__":
+ debug_log("Starting application")
  try:
  demo.launch()
  finally:
  # Stop worker threads
- running = False
+ running = False
+ debug_log("Shutting down application")