openfree committed (verified)
Commit 04bc27d · 1 Parent(s): 25c851c

Update app.py

Files changed (1)
  1. app.py +49 -49
app.py CHANGED
@@ -864,58 +864,58 @@ with gr.Blocks(fill_height=True, title="Enhanced ThinkFlow") as demo:
     temperature = gr.Slider(0.1, 1.0, 0.7, step=0.1, label="Temperature")
     memory_weight = gr.Slider(0.0, 1.0, 0.5, step=0.1, label="Memory reflection weight")
 
-    # Feedback handling functions
-    def process_positive_feedback():
-        global buffer_manager, current_contexts
-        if buffer_manager:
-            buffer_manager.update_retrieval_reward(current_contexts, reward=1.0)
-        return "Thanks for the feedback! This approach will be used more often for similar questions in the future."
-
-    def process_negative_feedback():
-        global buffer_manager, current_contexts
-        if buffer_manager:
-            buffer_manager.update_retrieval_reward(current_contexts, reward=-0.5)
-        return "Thanks for the feedback! This approach will be improved."
-
-    def clear_memory():
-        global buffer_manager
-        if buffer_manager:
-            buffer_manager.clear()
-        return "Memory has been reset."
-
-    def update_memory_displays():
-        global buffer_manager
-        if not buffer_manager:
-            return "Memory has not been initialized.", "Graph has not been initialized."
-
-        semantic_text = "Currently stored memories:\n\n"
-        for i, mem in enumerate(buffer_manager.semantic_memory.memories[:5]):  # show at most 5 entries
-            semantic_text += f"{i+1}. {mem['text'][:100]}...\n\n"
-
-        graph_text = "Current graph nodes:\n\n"
-        for node in buffer_manager.graph_memory.graph.nodes():
-            node_text = buffer_manager.graph_memory.get_text_by_node(node)
-            neighbors = list(buffer_manager.graph_memory.graph.neighbors(node))
-            graph_text += f"Node: {node}\nDescription: {node_text[:50]}...\nConnections: {', '.join(neighbors[:3])}\n\n"
-
-        return semantic_text, graph_text
-
-    # Initialization function
-    def initialize_models():
-        global pipe, buffer_manager, model_name
-        try:
-            pipe, buffer_manager = initialize_model_and_manager(model_name)
-            semantic_text, graph_text = update_memory_displays()
-            return "Model has been initialized.", semantic_text, graph_text
-        except Exception as e:
-            return f"Model initialization error: {str(e)}", "", ""
-
-    # Handle model selection changes
-    def change_model(new_model_name):
-        global model_name
-        model_name = new_model_name
-        status, semantic_text, graph_text = initialize_models()
-        return status, semantic_text, graph_text
+    # Feedback handling functions
+    def process_positive_feedback():
+        global buffer_manager, current_contexts
+        if buffer_manager:
+            buffer_manager.update_retrieval_reward(current_contexts, reward=1.0)
+        return "Thanks for the feedback! This approach will be used more often for similar questions in the future."
+
+    def process_negative_feedback():
+        global buffer_manager, current_contexts
+        if buffer_manager:
+            buffer_manager.update_retrieval_reward(current_contexts, reward=-0.5)
+        return "Thanks for the feedback! This approach will be improved."
+
+    def clear_memory():
+        global buffer_manager
+        if buffer_manager:
+            buffer_manager.clear()
+        return "Memory has been reset."
+
+    def update_memory_displays():
+        global buffer_manager
+        if not buffer_manager:
+            return "Memory has not been initialized.", "Graph has not been initialized."
+
+        semantic_text = "Currently stored memories:\n\n"
+        for i, mem in enumerate(buffer_manager.semantic_memory.memories[:5]):  # show at most 5 entries
+            semantic_text += f"{i+1}. {mem['text'][:100]}...\n\n"
+
+        graph_text = "Current graph nodes:\n\n"
+        for node in buffer_manager.graph_memory.graph.nodes():
+            node_text = buffer_manager.graph_memory.get_text_by_node(node)
+            neighbors = list(buffer_manager.graph_memory.graph.neighbors(node))
+            graph_text += f"Node: {node}\nDescription: {node_text[:50]}...\nConnections: {', '.join(neighbors[:3])}\n\n"
+
+        return semantic_text, graph_text
+
+    # Initialization function
+    def initialize_models():
+        global pipe, buffer_manager, model_name
+        try:
+            pipe, buffer_manager = initialize_model_and_manager(model_name)
+            semantic_text, graph_text = update_memory_displays()
+            return "Model has been initialized.", semantic_text, graph_text
+        except Exception as e:
+            return f"Model initialization error: {str(e)}", "", ""
+
+    # Handle model selection changes
+    def change_model(new_model_name):
+        global model_name
+        model_name = new_model_name
+        status, semantic_text, graph_text = initialize_models()
+        return status, semantic_text, graph_text
 
 
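The hunk above only defines the callback functions; the components they attach to sit outside this hunk. Below is a minimal, self-contained sketch of how handlers like these could be wired to Gradio events. It assumes the functions from the hunk (process_positive_feedback, process_negative_feedback, clear_memory, update_memory_displays, change_model) are defined in the same module; every component name and dropdown choice in the sketch is a placeholder, not taken from app.py.

import gradio as gr

# Sketch only: assumes the callbacks defined in the hunk above exist in this module.
# All component names and dropdown choices below are placeholders.
with gr.Blocks(title="Enhanced ThinkFlow") as sketch_demo:
    model_dropdown = gr.Dropdown(choices=["model-a", "model-b"], label="Model")
    status_box = gr.Textbox(label="Status", interactive=False)
    semantic_box = gr.Textbox(label="Semantic memory", lines=8)
    graph_box = gr.Textbox(label="Graph memory", lines=8)

    with gr.Row():
        positive_btn = gr.Button("Helpful")
        negative_btn = gr.Button("Not helpful")
        clear_btn = gr.Button("Clear memory")

    # Feedback buttons adjust the retrieval reward and echo a confirmation message.
    positive_btn.click(fn=process_positive_feedback, outputs=status_box)
    negative_btn.click(fn=process_negative_feedback, outputs=status_box)
    clear_btn.click(fn=clear_memory, outputs=status_box)

    # Switching the model re-initializes the pipeline and refreshes both memory views.
    model_dropdown.change(
        fn=change_model,
        inputs=model_dropdown,
        outputs=[status_box, semantic_box, graph_box],
    )

    # Populate the memory views once when the page loads.
    sketch_demo.load(fn=update_memory_displays, outputs=[semantic_box, graph_box])

sketch_demo.launch()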