fukugawa committed
Commit 7497a42 · 1 Parent(s): 5c5f097

clear cache

indiebot_arena/ui/battle.py CHANGED
@@ -11,6 +11,7 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStream
 
 from indiebot_arena.config import MODEL_SELECTION_MODE, MAX_INPUT_TOKEN_LENGTH, MAX_NEW_TOKENS
 from indiebot_arena.service.arena_service import ArenaService
+from indiebot_arena.util.cache_manager import get_free_space_gb, clear_hf_cache
 
 DESCRIPTION = "### 💬 チャットバトル"
 
@@ -19,8 +20,13 @@ docs_path = os.path.join(base_dir, "docs", "battle_header.md")
 
 
 @spaces.GPU(duration=30)
-def generate(chat_history: list, model_id: str, max_new_tokens: int = MAX_NEW_TOKENS,
-             temperature: float = 0.6, top_p: float = 0.9, top_k: int = 50, repetition_penalty: float = 1.2)-> Iterator[str]:
+def generate(chat_history: list,
+             model_id: str,
+             max_new_tokens: int = MAX_NEW_TOKENS,
+             temperature: float = 0.6,
+             top_p: float = 0.9,
+             top_k: int = 50,
+             repetition_penalty: float = 1.2) -> Iterator[str]:
   tokenizer = AutoTokenizer.from_pretrained(model_id)
   model = AutoModelForCausalLM.from_pretrained(
     model_id,
@@ -58,6 +64,11 @@ def generate(chat_history: list, model_id: str, max_new_tokens: int = MAX_NEW_TO
 
 
 def update_user_message(user_message, history_a, history_b, weight_class_radio):
+  total, _, free = get_free_space_gb("/data")
+  print(f"Free disk space: {free:.2f} GB / {total:.2f} GB")
+  if free < (total * 0.2):
+    clear_hf_cache()
+
   new_history_a = history_a + [{"role": "user", "content": user_message}]
   new_history_b = history_b + [{"role": "user", "content": user_message}]
   return "", new_history_a, new_history_b, gr.update(interactive=False)
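
The only behavioural change in battle.py is the guard added at the top of update_user_message: before each new user turn is queued, free space on the /data volume is checked and the Hugging Face hub cache is cleared once less than 20% of the volume remains free. A minimal standalone sketch of that condition (the helper name low_on_space and its defaults are illustrative, not part of the commit):

```python
import shutil

def low_on_space(path="/data", min_free_ratio=0.2):
    # True when less than 20% of the volume at `path` is still free,
    # mirroring the `free < (total * 0.2)` check added in battle.py.
    total, _, free = shutil.disk_usage(path)
    return free < total * min_free_ratio
```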
indiebot_arena/util/__init__.py ADDED
File without changes
indiebot_arena/util/cache_manager.py ADDED
@@ -0,0 +1,22 @@
+import os
+import shutil
+
+
+def get_free_space_gb(path):
+  total, used, free = shutil.disk_usage(path)
+  return total / (1024 ** 3), used / (1024 ** 3), free / (1024 ** 3)  # convert bytes to GB
+
+
+def clear_hf_cache():
+  """
+  Delete the cache (the "hub" folder) inside the directory specified by the HF_HOME environment variable.
+  If HF_HOME is not set, fall back to the default "~/.cache/huggingface".
+  """
+  hf_home = os.environ.get("HF_HOME", os.path.expanduser("~/.cache/huggingface"))
+  cache_path = os.path.join(hf_home, "hub")
+
+  if os.path.exists(cache_path):
+    shutil.rmtree(cache_path)
+    print(f"Cleared the Hugging Face cache ({cache_path}).")
+  else:
+    print(f"Cache directory {cache_path} does not exist.")
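
Both helpers are self-contained, so they can also be exercised outside the app; a quick usage sketch under the same assumptions as battle.py (the "/data" mount and the 20% free-space threshold come from the diff above; running this elsewhere may require a different path):

```python
from indiebot_arena.util.cache_manager import get_free_space_gb, clear_hf_cache

# Report disk usage of the persistent storage mount in GB.
total, used, free = get_free_space_gb("/data")
print(f"{free:.1f} GB free of {total:.1f} GB ({used:.1f} GB used)")

# Wipe the downloaded model cache once less than 20% of the volume is free.
if free < total * 0.2:
    clear_hf_cache()
```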