import os
os.environ["STREAMLIT_GLOBAL_CONFIG"] = "/data/.streamlit/config.toml"
import time
import uuid
import random
import urllib.parse # To parse URL parameters
from functools import lru_cache
import streamlit as st
import numpy as np
import pandas as pd
import duckdb
import hdbscan

# Database file path
DB_PATH = 'steampolis.duckdb'

# Initialize database tables if they don't exist
def initialize_database():
    try:
        init_con = duckdb.connect(database=DB_PATH, read_only=False)
        init_con.execute("""
            CREATE TABLE IF NOT EXISTS topics (
                id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                description TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        """)
        init_con.execute("""
            CREATE TABLE IF NOT EXISTS users (
                id TEXT PRIMARY KEY,
                username TEXT NOT NULL UNIQUE,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        """)
        init_con.execute("""
            CREATE TABLE IF NOT EXISTS comments (
                id TEXT PRIMARY KEY,
                topic_id TEXT NOT NULL,
                user_id TEXT NOT NULL,
                content TEXT NOT NULL,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (topic_id) REFERENCES topics(id),
                FOREIGN KEY (user_id) REFERENCES users(id)
            )
        """)
        init_con.execute("""
            CREATE TABLE IF NOT EXISTS votes (
                id TEXT PRIMARY KEY,
                user_id TEXT NOT NULL,
                comment_id TEXT NOT NULL,
                vote_type TEXT NOT NULL CHECK (vote_type IN ('agree', 'disagree', 'neutral')),
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (user_id) REFERENCES users(id),
                FOREIGN KEY (comment_id) REFERENCES comments(id),
                UNIQUE (user_id, comment_id)
            )
        """)
        init_con.execute("""
            CREATE TABLE IF NOT EXISTS user_comment_collections (
                id TEXT PRIMARY KEY,
                user_id TEXT NOT NULL,
                comment_id TEXT NOT NULL,
                collected_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (user_id) REFERENCES users(id),
                FOREIGN KEY (comment_id) REFERENCES comments(id),
                UNIQUE (user_id, comment_id)
            )
        """)
        init_con.execute("""
            CREATE TABLE IF NOT EXISTS user_progress (
                id TEXT PRIMARY KEY,
                user_id TEXT NOT NULL,
                topic_id TEXT NOT NULL,
                last_comment_id_viewed TEXT,
                last_viewed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (user_id) REFERENCES users(id),
                FOREIGN KEY (topic_id) REFERENCES topics(id),
                FOREIGN KEY (last_comment_id_viewed) REFERENCES comments(id),
                UNIQUE (user_id, topic_id)
            )
        """)

        # Create system user if it doesn't exist
        try:
            init_con.execute("""
                INSERT INTO users (id, username)
                VALUES ('system', 'System')
                ON CONFLICT (id) DO NOTHING
            """)
        except Exception as e:
            print(f"Warning: Could not create system user: {e}")

    except Exception as e:
        st.error(f"Database initialization failed: {e}")
    finally:
        if 'init_con' in locals() and init_con:
            init_con.close()

def get_ttl_hash(seconds=360):
    """Return the same value withing `seconds` time period"""
    return round(time.time() / seconds)
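
# Usage pattern (illustrative): functools.lru_cache has no built-in expiry, so callers
# pass a time bucket to force cache turnover every `seconds` seconds, e.g.
#   get_user_cluster_label(user_id, ttl_hash=get_ttl_hash())
# A new bucket value produces a new cache key, so stale clustering results are recomputed.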

# Helper function to get the R matrix from user voting data
# This matrix represents user-comment interactions (votes)
# Users are rows, comments are columns.
# Values: 1 for 'agree', 0 for 'neutral', -1 for 'disagree', NaN for unvoted.
# Requires pandas and numpy.
def get_r_matrix_from_votes():
    local_con = None
    try:
        local_con = duckdb.connect(database=DB_PATH, read_only=True) # Read-only is sufficient

        # Fetch all vote data
        # fetchdf requires pandas
        votes_df = local_con.execute("""
            SELECT user_id, comment_id, vote_type
            FROM votes
        """).fetchdf()

        if votes_df.empty:
            # Return empty matrix and mappings if no votes exist
            # pd.DataFrame requires pandas
            return pd.DataFrame(), {}, {}

        # Map vote types to numerical values
        vote_mapping = {'agree': 1, 'neutral': 0, 'disagree': -1}
        votes_df['vote_value'] = votes_df['vote_type'].map(vote_mapping)

        # Create the R matrix using pivot_table
        # This automatically handles missing user-comment pairs by filling with NaN
        # pivot_table requires pandas
        r_matrix = votes_df.pivot_table(
            index='user_id',
            columns='comment_id',
            values='vote_value'
        )

        # Create mappings from user/comment IDs to matrix indices (optional but useful)
        user_id_to_index = {user_id: i for i, user_id in enumerate(r_matrix.index)}
        comment_id_to_index = {comment_id: i for i, comment_id in enumerate(r_matrix.columns)}

        return r_matrix, user_id_to_index, comment_id_to_index

    except Exception as e:
        # st.error is not available here, just print or log
        print(f"Error generating R matrix: {e}")
        # Return empty results in case of error
        # pd.DataFrame requires pandas
        return pd.DataFrame(), {}, {}
    finally:
        if local_con:
            local_con.close()
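
# Illustrative shape of the returned R matrix (hypothetical IDs):
#
#             comment_1  comment_2  comment_3
#   user_a          1.0        NaN       -1.0
#   user_b          0.0        1.0        NaN
#
# Rows are users, columns are comments; NaN marks comments the user has not voted on.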


# Custom Hamming-like distance function handling NaNs for clustering
# Assumes numpy is imported as np
def hamming_distance_with_nan(u1, u2):
    """
    Calculates a Hamming-like distance between two vectors (user vote profiles)
    ignoring positions where either value is NaN.

    Args:
        u1 (np.ndarray or pd.Series): First vector.
        u2 (np.ndarray or pd.Series): Second vector.

    Returns:
        float: The proportion of differing elements among non-NaN positions.
               Returns 0.0 if vectors are identical (including all NaN),
               1.0 if different but no common non-NaN positions.
    """
    u1 = np.asarray(u1)
    u2 = np.asarray(u2)

    # Find positions where both are not NaN
    both_not_nan_mask = ~np.isnan(u1) & ~np.isnan(u2)

    # If no common non-NaN values
    if not np.any(both_not_nan_mask):
         # If vectors are identical (e.g., both all NaN), distance is 0.
         # If different vectors with no common non-NaN, distance is 1 (max difference).
         if np.array_equal(u1, u2, equal_nan=True):
              return 0.0
         else:
              return 1.0

    # Filter to only positions where both are not NaN
    u1_filtered = u1[both_not_nan_mask]
    u2_filtered = u2[both_not_nan_mask]

    # Calculate proportion of differing elements among common non-NaN positions
    diff_count = np.sum(u1_filtered != u2_filtered)
    total_count = len(u1_filtered)

    return diff_count / total_count
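
# Worked example (illustrative values):
#   u1 = [1, 0, np.nan, -1]
#   u2 = [1, 1, -1, np.nan]
# Only indices 0 and 1 are non-NaN in both vectors, giving pairs (1, 1) and (0, 1).
# One of the two pairs differs, so hamming_distance_with_nan(u1, u2) == 0.5.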


# Function to get clusters using HDBSCAN with the custom Hamming distance
# Assumes pandas is imported as pd, numpy as np, and hdbscan is imported
def get_clusters_from_r_matrix(r_matrix):
    """
    Performs HDBSCAN clustering on the R matrix using a custom Hamming-like distance
    that handles NaN values.

    Args:
        r_matrix (pd.DataFrame): The user-comment vote matrix from get_r_matrix_from_votes.
                                 Index should be user_id, columns comment_id.

    Returns:
        np.ndarray: An array of cluster labels for each user in the r_matrix index.
                    -1 indicates noise. Returns empty array if clustering fails or
                    r_matrix is empty.
    """
    # Check if r_matrix is empty
    if r_matrix.empty:
        print("R matrix is empty, cannot perform clustering.")
        return np.array([])

    try:
        # Instantiate HDBSCAN with the custom metric
        # Using default parameters for min_cluster_size and min_samples
        # These might need tuning based on data characteristics and desired cluster granularity
        # allow_single_cluster=True prevents an error if all points form one cluster
        clusterer = hdbscan.HDBSCAN(metric=hamming_distance_with_nan, allow_single_cluster=True)

        # Fit the model directly to the DataFrame values
        # HDBSCAN fit expects a numpy array or similar structure
        clusterer.fit(r_matrix.values)

        # Return the cluster labels
        return clusterer.labels_

    except Exception as e:
        # In a Streamlit app context, st.error would be better, but not available here.
        # Print to console/logs.
        print(f"Error during HDBSCAN clustering: {e}")
        return np.array([]) # Return empty array on error
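
# Example output (illustrative): for a 4-user R matrix, get_clusters_from_r_matrix
# might return np.array([0, 0, 1, -1]), meaning the first two users share cluster 0,
# the third forms cluster 1, and the fourth is labelled noise (-1) by HDBSCAN.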


def get_cluster_labels():
    r_matrix, user_id_to_index, _ = get_r_matrix_from_votes()
    cluster_labels = get_clusters_from_r_matrix(r_matrix)
    if len(cluster_labels) == 0:
        cluster_labels = [0] * len(user_id_to_index)
    return cluster_labels, user_id_to_index


# Function to get the cluster label for a specific user
@lru_cache()
def get_user_cluster_label(user_id, ttl_hash=None):
    """
    Gets the HDBSCAN cluster label for a specific user and a list of users
    sharing the same cluster.

    Args:
        user_id (str): The ID of the user.
        ttl_hash (int, optional): Time-bucketed value (see get_ttl_hash) used only to
            periodically invalidate the lru_cache; it does not affect the result.

    Returns:
        tuple: A tuple containing:
            - int or None: The cluster label (an integer, -1 for noise) if the user
                           is found in the clustering result, otherwise None.
            - list[str]: A list of user IDs (including the input user_id if found)
                         that belong to the same cluster. Returns an empty list
                         if the user is not found or has no cluster label.
    """
    # get_cluster_labels recomputes the clustering from all votes; this function's
    # own results are memoized via lru_cache, with ttl_hash used to expire entries.
    cluster_labels, user_id_to_index = get_cluster_labels()

    # Create a reverse mapping from index to user_id for easier lookup
    index_to_user_id = {index: uid for uid, index in user_id_to_index.items()}

    target_cluster_label = None
    same_cluster_users = []

    # Check if the user_id exists in the mapping
    if user_id in user_id_to_index:
        user_index = user_id_to_index[user_id]
        # Ensure the index is within the bounds of the cluster_labels array
        if 0 <= user_index < len(cluster_labels):
            target_cluster_label = int(cluster_labels[user_index]) # Get the target label

            # Find all users with the same cluster label
            for index, current_user_id in index_to_user_id.items():
                # Ensure the index is valid for cluster_labels
                if 0 <= index < len(cluster_labels):
                    current_user_label = int(cluster_labels[index])
                    if current_user_label == target_cluster_label:
                        same_cluster_users.append(current_user_id)
                else:
                     # This case should ideally not happen if index_to_user_id is consistent
                     print(f"Warning: Index {index} from index_to_user_id out of bounds for cluster labels array length {len(cluster_labels)}")


        else:
            # This case should ideally not happen if user_id_to_index is consistent
            print(f"Warning: User index {user_index} out of bounds for cluster labels array length {len(cluster_labels)}")
            # Return None and empty list as user couldn't be processed
            return None, []
    else:
        # User not found in the R matrix used for clustering (e.g., new user with no votes)
        # print(f"User ID {user_id} not found in clustering data.") # Optional: for debugging
        # Return None and empty list as user is not part of the current clustering result
        return None, []

    # Return the target user's label and the list of users in that cluster
    return target_cluster_label, same_cluster_users
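
# Example return (illustrative): if user "u1" falls into cluster 2 alongside "u5" and
# "u9", get_user_cluster_label("u1", ttl_hash=get_ttl_hash()) returns
# (2, ["u1", "u5", "u9"]); a user with no recorded votes yet returns (None, []).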


# Helper function to get top k most polarized comments for a list of users
def get_top_k_polarized_comments_for_users(user_ids, k=5):
    """
    Retrieves the top k comments most agreed or disagreed upon (most polarized)
    by a given list of users.

    Args:
        user_ids (list[str]): A list of user IDs.
        k (int): The number of top comments to retrieve.

    Returns:
        list[tuple]: A list of tuples, where each tuple contains
                     (comment_id, comment_content, average_vote_score),
                     ordered by the absolute value of the average score descending.
                     Returns an empty list if no votes are found for these users
                     or on error.
    """
    if not user_ids:
        # print("Warning: get_top_k_polarized_comments_for_users called with empty user_ids list.") # Optional debug
        return [] # Cannot query without user IDs

    local_con = None
    try:
        local_con = duckdb.connect(database=DB_PATH, read_only=True)

        # Use parameterized query for the list of user IDs
        # DuckDB's Python API handles lists for IN clauses
        query = """
            SELECT
                v.comment_id,
                c.content,
                AVG(CASE
                    WHEN v.vote_type = 'agree' THEN 1.0
                    WHEN v.vote_type = 'neutral' THEN 0.0
                    WHEN v.vote_type = 'disagree' THEN -1.0
                    ELSE NULL -- Should not happen with current data
                END) as average_vote_score
            FROM votes v
            JOIN comments c ON v.comment_id = c.id
            WHERE v.user_id IN (?)
            GROUP BY v.comment_id, c.content
            HAVING COUNT(v.user_id) > 0 -- Ensure at least one user from the list voted on this comment
            ORDER BY ABS(average_vote_score) DESC
            LIMIT ?
        """
        # Pass the list of user_ids and k as parameters
        result = local_con.execute(query, [user_ids, k]).fetchall()

        return result

    except Exception as e:
        # st.error is not available here, just print or log
        print(f"Error getting top k polarized comments for users {user_ids}: {e}")
        return [] # Return empty list on error
    finally:
        if local_con:
            local_con.close()
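
# Example return value (hypothetical IDs and scores):
#   [('c42', 'Rebuild the old aqueduct first.', 0.9),
#    ('c17', 'Lower the grain tax.', -0.8)]
# Scores near +1 or -1 indicate strong group agreement or disagreement; comments with
# scores near 0 sort last because of ORDER BY ABS(average_vote_score) DESC.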


@lru_cache()
def estimate_group_voting_diversity(user_ids, topic_id):
    """
    Estimates the diversity of voting within a group of users for a specific topic.
    Diversity is measured by the average variance of numerical vote scores (-1, 0, 1)
    across comments that at least two users in the group have voted on.

    Args:
        user_ids (tuple[str, ...]): User IDs belonging to the group, passed as a
            tuple because lru_cache requires hashable arguments.
        topic_id (str): The ID of the topic.

    Returns:
        float: A diversity score between 0.0 and 1.0. 0.0 indicates no diversity
               (all users voted the same way on all shared comments), 1.0 indicates
               maximum possible diversity (e.g., half agree, half disagree on shared comments).
               Returns 0.0 if the group has less than 2 users or if no comments
               were voted on by at least two users in the group.
    """
    if not user_ids or len(user_ids) < 2:
        return 0.0

    # user_ids arrives as a tuple (lru_cache requires hashable arguments);
    # convert it to a list for the DuckDB parameter binding below.
    user_ids = list(user_ids)

    local_con = None
    try:
        local_con = duckdb.connect(database=DB_PATH, read_only=True)

        # Get all votes for the given topic by the specified users
        # Join with comments to filter by topic_id
        query = """
            SELECT
                v.comment_id,
                v.user_id,
                v.vote_type
            FROM votes v
            JOIN comments c ON v.comment_id = c.id
            WHERE c.topic_id = ? AND v.user_id IN (?)
        """
        # DuckDB's Python API handles lists for IN clauses
        results = local_con.execute(query, [topic_id, user_ids]).fetchall()

        if not results:
            return 0.0 # No votes found for this group on this topic

        # Map vote types to numerical scores
        vote_map = {'agree': 1.0, 'neutral': 0.0, 'disagree': -1.0}

        # Group votes by comment ID
        votes_by_comment = {}
        for comment_id, user_id, vote_type in results:
            if comment_id not in votes_by_comment:
                votes_by_comment[comment_id] = []
            # Append the numerical vote score
            votes_by_comment[comment_id].append(vote_map.get(vote_type, 0.0)) # Default to 0.0 for unknown types

        # Calculate variance for comments voted on by at least two users in the group
        variances = []
        for comment_id, comment_votes in votes_by_comment.items():
            # Ensure the comment was voted on by at least two users from the input list
            if len(comment_votes) >= 2:
                # Use numpy to calculate variance
                variances.append(np.var(comment_votes))

        if not variances:
            return 0.0 # No comments voted on by at least two users in the group

        # The maximum possible variance for values in [-1, 0, 1] is 1.0
        # (e.g., half votes are 1, half are -1).
        # The average variance is already in the range [0, 1].
        average_variance = np.mean(variances)

        return average_variance

    except Exception as e:
        # st.error is not available here, just print or log
        print(f"Error estimating group voting diversity for topic {topic_id} and users {user_ids}: {e}")
        return 0.0 # Return 0.0 on error
    finally:
        if local_con:
            local_con.close()
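
# Worked example (illustrative): if two comments were each voted on by at least two
# group members,
#   comment A votes: [1.0, 1.0, -1.0, -1.0]  -> np.var == 1.0 (evenly split)
#   comment B votes: [1.0, 1.0, 1.0]         -> np.var == 0.0 (unanimous)
# the diversity score is the mean of the per-comment variances, i.e. 0.5.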


# Helper function to name a group of users based on their participation and voting diversity
def name_user_group(user_ids, topic_id):
    """
    Generates a descriptive name and description for a group of users within a
    specific topic based on their participation level and voting diversity.

    Args:
        user_ids (list[str]): A list of user IDs belonging to the group.
        topic_id (str): The ID of the topic.

    Returns:
        tuple[str, str]: A tuple containing the name and description for the group.
                         Returns ("Silent Gathering", "This group has no members.")
                         or ("Unengaged Group", "No members of this group have voted on this topic.")
                         or ("Isolated Voices", "This topic has no voters yet.")
                         or ("Mysterious Gathering", "An error occurred while trying to name this group.")
                         in edge cases or on error.
    """
    # Handle empty user list
    if not user_ids:
        return "Silent Gathering", "This group has no members."

    local_con = None
    try:
        local_con = duckdb.connect(database=DB_PATH, read_only=True)

        # 1. Get total unique users who voted in the topic
        total_voters_result = local_con.execute("""
            SELECT COUNT(DISTINCT user_id)
            FROM votes v
            JOIN comments c ON v.comment_id = c.id
            WHERE c.topic_id = ?
        """, [topic_id]).fetchone()
        total_voters_in_topic = total_voters_result[0] if total_voters_result else 0

        # 2. Get unique users from the input list who voted in the topic
        # Filter user_ids to only those present in the votes table for this topic
        # DuckDB IN clause handles lists directly
        group_voters_result = local_con.execute("""
            SELECT COUNT(DISTINCT user_id)
            FROM votes v
            JOIN comments c ON v.comment_id = c.id
            WHERE c.topic_id = ? AND v.user_id IN (?)
        """, [topic_id, user_ids]).fetchone()
        group_voters_count = group_voters_result[0] if group_voters_result else 0

        # Handle case where no one in the group has voted on this topic
        if group_voters_count == 0:
             return "Unengaged Group", "No members of this group have voted on this topic."

        # Handle case where topic has no voters but the group somehow has voters (shouldn't happen if queries are correct)
        if total_voters_in_topic == 0:
             # This case is unlikely if group_voters_count > 0, but for safety
             return "Isolated Voices", "This topic has no voters yet."


        # 3. Calculate significance (proportion of group voters among all topic voters)
        significance_proportion = group_voters_count / total_voters_in_topic

        # 4. Get diversity score for the group
        diversity_score = estimate_group_voting_diversity(tuple(user_ids), topic_id)  # tuple: hashable for lru_cache

        # 5. Determine name and description based on significance and diversity
        # Define thresholds (can be tuned)
        SIG_LOW_THRESHOLD = 0.1
        SIG_MED_THRESHOLD = 0.5 # High if > MED, Med if > LOW and <= MED, Low if <= LOW
        DIV_LOW_THRESHOLD = 0.2
        DIV_MED_THRESHOLD = 0.5 # High if > MED, Med if > LOW and <= MED, Low if <= LOW

        significance_level = "low"
        if significance_proportion > SIG_MED_THRESHOLD:
            significance_level = "high"
        elif significance_proportion > SIG_LOW_THRESHOLD:
            significance_level = "medium"

        diversity_level = "low"
        if diversity_score > DIV_MED_THRESHOLD:
            diversity_level = "high"
        elif diversity_score > DIV_LOW_THRESHOLD:
            diversity_level = "medium"

        # Assign names and descriptions based on levels
        if significance_level == "high":
            if diversity_level == "low":
                return "Likeheart Village", "A large group where opinions converge."
            elif diversity_level == "medium":
                return "Harmonious Assembly", "A significant gathering with mostly aligned views."
            else: # high diversity
                return "Vibrant Forum", "A large, active group with diverse perspectives."
        elif significance_level == "medium":
            if diversity_level == "low":
                return "Quiet Consensus", "A moderately sized group with little disagreement."
            elif diversity_level == "medium":
                return "Mixed Opinions", "A balanced group with varied viewpoints."
            else: # high diversity
                return "Lively Discussion", "A moderately sized group with strong, differing opinions."
        else: # low significance
            if diversity_level == "low":
                return "Echo Chamber Nook", "A small corner where similar thoughts resonate."
            elif diversity_level == "medium":
                return "Scattered Thoughts", "A small group with somewhat varied, isolated views."
            else: # high diversity
                return "Whispering Gallery", "A small group where many different ideas are quietly shared."

    except Exception as e:
        print(f"Error naming user group for topic {topic_id} and users {user_ids}: {e}")
        return "Mysterious Gathering", "An error occurred while trying to name this group." # Default name and description on error
    finally:
        if local_con:
            local_con.close()
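
# Example (illustrative): a group containing over half of a topic's voters
# (significance "high") whose shared votes barely vary (diversity "low") is named
# ("Likeheart Village", "A large group where opinions converge.").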


# Helper function to get a random unvoted comment
def get_random_unvoted_comment(user_id, topic_id):
    new_area_comments = st.session_state.get("_new_area_comments", [])
    if len(new_area_comments) != 0:
        value = new_area_comments.pop()
        st.session_state._new_area_comments = new_area_comments
        return value[0], value[1]
    local_con = None
    try:
        local_con = duckdb.connect(database=DB_PATH, read_only=False)

        # First, check if there are any comments at all in the topic
        comment_count = local_con.execute("""
            SELECT COUNT(*) FROM comments WHERE topic_id = ?
        """, [topic_id]).fetchone()[0]

        if comment_count == 0:
            return None, "Share your insight!"

        # Attempt to get a random comment that the user has NOT voted on
        result = local_con.execute("""
            SELECT c.id, c.content
            FROM comments c
            WHERE c.topic_id = ?
            AND NOT EXISTS (
                SELECT 1 FROM votes v
                WHERE v.comment_id = c.id AND v.user_id = ?
            )
            ORDER BY RANDOM()
            LIMIT 1
        """, [topic_id, user_id]).fetchone()
        if result:
            # Check for cluster change and set message flag
            current_label, current_users = get_user_cluster_label(user_id, ttl_hash=get_ttl_hash())
            current_users_set = set(current_users)

            previous_label = st.session_state.get('_previous_cluster_label')
            previous_users_set = st.session_state.get('_previous_cluster_users_set', set())

            # Check if cluster label has changed AND the set of users in the new cluster is different
            # This indicates the user has moved to a different group of commenters
            if current_label is not None and previous_label is not None and current_label != previous_label:
                if current_users_set != previous_users_set:
                    # Set a flag in session state to display the message later in the main rendering logic
                    st.session_state._show_new_area_message = True
                    new_area_comments = get_top_k_polarized_comments_for_users(list(current_users_set), k=5)
                    st.session_state._new_area_comments = new_area_comments
                    # print(f"DEBUG: Cluster changed for user {user_id} in topic {topic_id}: {previous_label} -> {current_label}")
                    # print(f"DEBUG: Previous users count: {len(previous_users_set)}, Current users count: {len(current_users_set)}")
            st.session_state._previous_cluster_label = current_label
            st.session_state._previous_cluster_users_set = current_users_set

            # Found an unvoted comment
            return result[0], result[1]
        else:
            # No unvoted comments found for this user in this topic
            return None, "No new thoughts for now..."

    except Exception as e:
        st.error(f"Error getting random unvoted comment: {e}")
        return None, f"Error loading comments: {str(e)}"
    finally:
        if local_con:
            local_con.close()

# Helper function to find or create a user
def find_or_create_user(username):
    local_con = None
    try:
        local_con = duckdb.connect(database=DB_PATH, read_only=False)
        user_result = local_con.execute("SELECT id FROM users WHERE username = ?", [username]).fetchone()
        if user_result:
            return user_result[0]
        else:
            user_id = str(uuid.uuid4())
            local_con.execute("INSERT INTO users (id, username) VALUES (?, ?)", [user_id, username])
            return user_id
    except Exception as e:
        st.error(f"Error finding or creating user: {e}")
        return None
    finally:
        if local_con:
            local_con.close()

# Helper function to update user progress
def update_user_progress(user_id, topic_id, comment_id):
    local_con = None
    try:
        local_con = duckdb.connect(database=DB_PATH, read_only=False)
        progress_id = str(uuid.uuid4())
        local_con.execute("""
            INSERT INTO user_progress (id, user_id, topic_id, last_comment_id_viewed) VALUES (?, ?, ?, ?)
            ON CONFLICT (user_id, topic_id) DO UPDATE SET
                last_comment_id_viewed = EXCLUDED.last_comment_id_viewed
        """, [progress_id, user_id, topic_id, comment_id])
    except Exception as e:
        st.error(f"Error updating user progress: {e}")
    finally:
        if local_con:
            local_con.close()

# Helper function to handle comment submission UI and logic
def share_wisdom(prompt, allow_skip=False):
    st.markdown(prompt)
    new_comment_text = st.text_area(f"Your Insight{' (one that differs from those above; leave empty to skip)' if allow_skip else ''}", key="new_comment_input")
    if st.button("Share Your Wisdom"):
        if new_comment_text:
            user_email = st.session_state.get('user_email', '')
            topic_id = st.session_state.get('current_topic_id')  # share_wisdom is only used from a topic page
            user_id = find_or_create_user(user_email) # Ensure user exists
            if user_id:
                local_con = None
                try:
                    local_con = duckdb.connect(database=DB_PATH, read_only=False)
                    comment_id = str(uuid.uuid4())
                    local_con.execute("""
                        INSERT INTO comments (id, topic_id, user_id, content)
                        VALUES (?, ?, ?, ?)
                    """, [comment_id, topic_id, user_id, new_comment_text])

                    # Append new comment to history
                    st.session_state.comment_history += f"\n\nπŸ’¬ {new_comment_text}"

                    # Get next comment (could be the one just submitted)
                    next_comment_id, next_comment_content = get_random_unvoted_comment(user_id, topic_id)
                    st.session_state.current_comment_id = next_comment_id
                    st.session_state.current_comment_content = next_comment_content

                    # Update progress
                    update_user_progress(user_id, topic_id, next_comment_id)

                    # Note: the input box is left as-is; Streamlit does not allow assigning
                    # to a widget-keyed session_state value after the widget has been rendered.
                    st.rerun() # Rerun to update UI

                except Exception as e:
                    st.error(f"Error sharing information: {e}")
                finally:
                    if local_con:
                        local_con.close()
            else:
                st.error("Could not find or create user.")
        elif allow_skip:
            return
        else:
            st.warning("Please enter your thought.")


# --- Page Functions ---

def home_page():
    st.title("Welcome to SteamPolis")
    st.markdown("Choose an option:")

    if st.button("Create New Topic (Quest)"):
        st.session_state.page = 'create_topic'
        st.rerun()

    st.markdown("---")
    st.markdown("Or join an existing topic (quest):")
    topic_input = st.text_input("Enter Topic ID or URL")

    if st.button("Join Topic"):
        topic_id = topic_input.strip()
        if topic_id.startswith('http'): # Handle full URL
             parsed_url = urllib.parse.urlparse(topic_id)
             query_params = urllib.parse.parse_qs(parsed_url.query)
             topic_id = query_params.get('topic', [None])[0]

        if topic_id:
            st.session_state.page = 'view_topic'
            st.session_state.current_topic_id = topic_id
            # Attempt to load email from session state (mimics browser state)
            # If email exists, handle email submission logic immediately on view page load
            st.rerun()
        else:
            st.warning("Please enter a valid Topic ID or URL.")


def create_topic_page():
    st.title("Create a New Topic")

    new_topic_name = st.text_input("Topic Name (imagine you are the king: how would you phrase your concern?)")
    new_topic_description = st.text_area('Description (begin with "I want to figure out...", as if you were the king stating what you want to know)', height=150)
    new_topic_seed_comments = st.text_area("Initial Comments (one per line; imagine how your citizens might answer)", height=200)
    creator_email = st.text_input("Enter your Email (required for creation)")

    if st.button("Create Topic"):
        if not creator_email:
            st.error("Email is required to create a topic.")
            return

        topic_id = str(uuid.uuid4())[:8]
        user_id = find_or_create_user(creator_email)

        if user_id:
            local_con = None
            try:
                local_con = duckdb.connect(database=DB_PATH, read_only=False)
                local_con.execute("INSERT INTO topics (id, name, description) VALUES (?, ?, ?)", [topic_id, new_topic_name, new_topic_description])

                seed_comments = [c.strip() for c in new_topic_seed_comments.split('\n') if c.strip()]
                for comment in seed_comments:
                    comment_id = str(uuid.uuid4())
                    local_con.execute("INSERT INTO comments (id, topic_id, user_id, content) VALUES (?, ?, ?, ?)",
                                      [comment_id, topic_id, 'system', comment])

                # Get the first comment to display after creation
                comment_to_display_id, comment_to_display_content = get_random_unvoted_comment(user_id, topic_id)

                # Set initial progress for creator
                update_user_progress(user_id, topic_id, comment_to_display_id)

                st.session_state.page = 'view_topic'
                st.session_state.current_topic_id = topic_id
                st.session_state.user_email = creator_email # Store email in session state
                st.session_state.current_comment_id = comment_to_display_id
                st.session_state.current_comment_content = comment_to_display_content
                st.session_state.comment_history = ""

                st.success(f"Topic '{new_topic_name}' created!")
                st.rerun()

            except Exception as e:
                st.error(f"Error creating topic: {e}")
            finally:
                if local_con:
                    local_con.close()
        else:
            st.error("Could not find or create user.")


    if st.button("Back to Home"):
        st.session_state.page = 'home'
        st.rerun()

def view_topic_page():
    topic_id = st.session_state.get('current_topic_id')
    user_email = st.session_state.get('user_email', '')
    current_comment_id = st.session_state.get('current_comment_id')
    current_comment_content = st.session_state.get('current_comment_content', "Loading comments...")
    comment_history = st.session_state.get('comment_history', "")
    show_new_area_message = st.session_state.get('_show_new_area_message', True)

    if not topic_id:
        st.warning("No topic selected. Returning to home.")
        st.session_state.page = 'home'
        st.rerun()
        return

    local_con = None
    topic_name = "Loading..."
    topic_description = "Loading..."

    try:
        local_con = duckdb.connect(database=DB_PATH, read_only=True)
        topic_result = local_con.execute("SELECT name, description FROM topics WHERE id = ?", [topic_id]).fetchone()
        if topic_result:
            topic_name, topic_description = topic_result
        else:
            st.error(f"Topic ID '{topic_id}' not found.")
            st.session_state.page = 'home'
            st.rerun()
            return
    except Exception as e:
        st.error(f"Error loading topic details: {e}")
        if local_con:
             local_con.close()
        st.session_state.page = 'home'
        st.rerun()
        return
    finally:
        if local_con:
            local_con.close()


    # Include functional information
    st.markdown(f"**Shareable Quest Scroll ID:** `{topic_id}`")
    # Construct shareable link using current app URL
    app_url = st.query_params.get('base', 'http://localhost:8501/') # st.query_params values are plain strings
    shareable_link = f"{app_url}?topic={topic_id}" if app_url else f"?topic={topic_id}"
    st.markdown(f"**Shareable Scroll Link:** `{shareable_link}`")

    st.title("Seeker Quest")

    # Check if user email is available in session state.
    # user_email is already retrieved from st.session_state at the start of view_topic_page.
    if user_email:
        # Get the user ID. find_or_create_user handles the DB connection internally.
        user_id = find_or_create_user(user_email)
        if user_id:
            # Check if user has any progress recorded for this specific topic.
            # This indicates they have viewed comments or interacted before.
            local_con = None
            progress_exists = False
            try:
                local_con = duckdb.connect(database=DB_PATH, read_only=True)
                # Query the user_progress table for a record matching user_id and topic_id
                result = local_con.execute("""
                    SELECT 1 FROM user_progress
                    WHERE user_id = ? AND topic_id = ?
                    LIMIT 1
                """, [user_id, topic_id]).fetchone()
                progress_exists = result is not None
            except Exception as e:
                # Log error but don't stop the app. Assume no progress on error.
                st.error(f"Error checking user progress for greeting: {e}")
                # progress_exists remains False
            finally:
                if local_con:
                    local_con.close()

            # Display the appropriate greeting based on progress
            if progress_exists:
                # Acknowledge return and remind of quest
                st.markdown("Welcome back, Seeker. Your journey through the whispers of Aethelgard continues.")
                st.markdown(f"You pause to recall the heart of the Emperor's concern regarding **{topic_name}**: `{topic_description}`.")

                # Introduce the next comment
                st.markdown("As you press onward, you encounter another soul willing to share their thoughts on this vital matter.")
            else:
                # Introduce the setting and the Emperor's concern
                st.markdown("Welcome, Seeker, to the ancient Kingdom of Aethelgard, a realm of digital whispers and forgotten wisdom.")
                st.markdown("For centuries, Aethelgard has stood, preserving the echoes of an age long past. But now, a matter of great weight troubles the Emperor's thoughts.")
                st.markdown(f"The Emperor seeks clarity on a crucial topic: **`{topic_name}`**.")

                # Explain the quest and the user's role
                st.markdown("You, among a select few, have been summoned for a vital quest: to traverse the kingdom, gather insights, and illuminate this matter for the throne.")
                st.markdown(f"At a recent royal gathering, the Emperor revealed the heart of their concern, the very essence of your mission: `{topic_description}`")

                # Transition to the task
                st.markdown("Your journey begins now. The path leads to the first village, where the voices of the realm await your ear.")


    # --- Email Prompt ---
    if not user_email:
        st.subheader("Enter your Email to view comments and progress")
        view_user_email_input = st.text_input("Your Email", key="view_email_input")
        if st.button("Submit Email", key="submit_view_email"):
            if view_user_email_input:
                st.session_state.user_email = view_user_email_input
                user_id = find_or_create_user(view_user_email_input)
                if user_id:
                    comment_to_display_id, comment_to_display_content = get_random_unvoted_comment(user_id, topic_id)
                    st.session_state.current_comment_id = comment_to_display_id
                    st.session_state.current_comment_content = comment_to_display_content
                    update_user_progress(user_id, topic_id, comment_to_display_id)
                    st.session_state.comment_history = "" # Reset history on new email submission
                    st.rerun()
                else:
                    st.error("Could not find or create user with that email.")
            else:
                st.warning("Please enter your email.")
        return # Stop rendering the rest until email is submitted

    # --- Comment Display and Voting ---
    # Define introductory phrases for encountering a new perspective
    intro_phrases = [
        "A new whisper reaches your ear",
        "You ponder a fresh perspective",
        "Another voice shares their view",
        "A thought emerges from the crowd",
        "The wind carries a new idea",
        "Someone offers an insight",
        "You overhear a comment",
        "A different angle appears",
        "The village elder shares",
        "A traveler murmurs",
    ]
    # Randomly select a phrase
    random_phrase = random.choice(intro_phrases)
    st.markdown(comment_history)

    if current_comment_id: # Only show voting if there's a comment to vote on
        # Display comment history and the current comment with the random intro
        if show_new_area_message:
            _, user_ids = get_user_cluster_label(user_id, ttl_hash=get_ttl_hash())
            new_area_name, desc = name_user_group(user_ids, topic_id)
            insight_count = len([line for line in comment_history.splitlines() if line.strip()])
            st.markdown(f"You've collected {insight_count} insights so far.")
            st.markdown(f"And yet a new place you have arrived: `{new_area_name}`. {desc}")
            st.session_state._show_new_area_message = False
        st.markdown(f"[Collected new insight, {random_phrase}]:\n* {current_comment_content}")

        # Handle vote logic
        def handle_vote(vote_type, comment_id, topic_id, user_id):
            # Record (or update) the vote, refresh the rolling comment history,
            # occasionally trigger the special "share your wisdom" event, then
            # advance to the next unvoted comment and rerun the app.
            local_con = None
            try:
                local_con = duckdb.connect(database=DB_PATH, read_only=False)
                # Use INSERT OR REPLACE INTO or ON CONFLICT DO UPDATE to handle repeat votes
                # The UNIQUE constraint on (user_id, comment_id) in the votes table
                # allows us to update the existing vote if one already exists for this user/comment pair.
                # We generate a new UUID for the 'id' column, but it will only be used
                # if this is a new insert. If it's an update, the existing 'id' is kept.
                vote_id = str(uuid.uuid4()) # Generate a new UUID for the potential insert
                local_con.execute("""
                    INSERT INTO votes (id, user_id, comment_id, vote_type)
                    VALUES (?, ?, ?, ?)
                    ON CONFLICT (user_id, comment_id)
                    DO UPDATE SET
                        vote_type = excluded.vote_type, -- Update vote_type with the new value
                        created_at = current_localtimestamp(); -- Update timestamp to reflect the latest vote
                """, [vote_id, user_id, comment_id, vote_type])

                # Append voted comment to history
                # Note: This appends the comment regardless of whether it was a new vote or an update.
                # The history is a simple log, not a reflection of vote changes.
                vote_text = "πŸ‘" if vote_type == "agree" else "πŸ‘Ž" if vote_type == "disagree" else "😐"
                comment_history = st.session_state.comment_history.split("\n\n")
                if len(comment_history) > 10:
                    comment_history = ["..."] + comment_history[-10:]
                st.session_state.comment_history = "\n\n".join(comment_history)
                st.session_state.comment_history += f"\n\n{vote_text} {current_comment_content}"

                # Check vote count and trigger special event
                # Initialize vote_count if it doesn't exist
                if 'vote_count' not in st.session_state:
                    st.session_state.vote_count = 0

                # Increment vote count only if it was a new vote or a change?
                # The current logic increments on every button click. Let's keep that for now
                # as it drives the special event trigger based on interaction frequency.
                st.session_state.vote_count += 1

                # Check if it's time for a potential special event (every 5 votes/interactions)
                if st.session_state.vote_count % 5 == 0:
                    st.session_state.vote_count = 0 # Reset count after triggering
                    # 30% chance to trigger the special sharing event
                    if random.random() < 0.3:
                        prompts = [
                            "An elder approaches you, seeking your perspective on the Emperor's concern. What wisdom do you share?",
                            "A letter arrives from the Emperor's office, requesting your personal insight on the matter. What counsel do you offer?",
                            "As you walk through the streets, people gather, eager to hear your thoughts on the Emperor's dilemma. What advice do you give?"
                        ]
                        # Pass the current topic_id to share_wisdom if needed, though it's not currently used there.
                        share_wisdom(random.choice(prompts), allow_skip=True)

                # Get next comment
                # This should always get the next unvoted comment for the user in this topic.
                next_comment_id, next_comment_content = get_random_unvoted_comment(user_id, topic_id)
                st.session_state.current_comment_id = next_comment_id
                st.session_state.current_comment_content = next_comment_content

                # Update progress
                # Update the user's progress to the next comment they should see.
                update_user_progress(user_id, topic_id, next_comment_id)

                st.rerun() # Rerun to update UI

            except Exception as e:
                st.error(f"Error processing vote: {e}")
            finally:
                if local_con:
                    local_con.close()

        col1, col2, col3, col4 = st.columns(4)
        user_id = find_or_create_user(user_email) # Ensure user exists

        col1.markdown("*Personally I...*")
        if col2.button("Agree"):
            handle_vote("agree", current_comment_id, topic_id, user_id)
        if col3.button("Neutral"):
            handle_vote("neutral", current_comment_id, topic_id, user_id)
        if col4.button("Disagree"):
            handle_vote("disagree", current_comment_id, topic_id, user_id)

    else:
        # No current comment available; show the placeholder message returned by
        # get_random_unvoted_comment (e.g. "No new thoughts for now...").
        st.info(current_comment_content)


    st.markdown("")

    # --- Comment Submission ---
    with st.expander("Offer Your Counsel to the Emperor", expanded=False):
        share_wisdom("Having heard the thoughts of others, what wisdom do you wish to share regarding the Emperor's concern?")
    st.markdown("---")


    if st.button("Pack all insights and Return to Capital"):
        st.session_state.page = 'home'
        st.rerun()

# Initialize session state for navigation and data
if 'page' not in st.session_state:
    st.session_state.page = 'home'
if 'current_topic_id' not in st.session_state:
    st.session_state.current_topic_id = None
if 'user_email' not in st.session_state:
    st.session_state.user_email = '' # Mimics browser state
if 'current_comment_id' not in st.session_state:
    st.session_state.current_comment_id = None
if 'current_comment_content' not in st.session_state:
    st.session_state.current_comment_content = "Loading comments..."
if 'comment_history' not in st.session_state:
    st.session_state.comment_history = ""
if 'processed_url_params' not in st.session_state:
    st.session_state.processed_url_params = False # Add flag initialization

# Initialize the database on first run
initialize_database()

# Handle initial load from URL query parameters
# Process only once per session load using the flag
query_params = st.query_params
# Check for 'topic' param and if it hasn't been processed yet
if 'topic' in query_params and not st.session_state.processed_url_params:
    topic_id_from_url = query_params.get('topic') # Use .get for safety
    if topic_id_from_url: # Check if topic_id is actually retrieved
        st.session_state.page = 'view_topic'
        st.session_state.current_topic_id = topic_id_from_url
        st.session_state.processed_url_params = True # Mark as processed
        # The view_topic_page will handle loading user/comment based on session_state.user_email
        st.rerun() # Rerun to apply the page change


# Render the appropriate page based on session state
if st.session_state.page == 'home':
    home_page()
elif st.session_state.page == 'create_topic':
    create_topic_page()
elif st.session_state.page == 'view_topic':
    view_topic_page()