Update app.py
app.py CHANGED
@@ -6,7 +6,9 @@ import subprocess
 import spaces
 
 OLLAMA = os.path.expanduser("~/ollama")
-process =
+process = None
+OLLAMA_SERVICE_THREAD = None
+
 
 if not os.path.exists(OLLAMA):
     subprocess.run("curl -L https://ollama.com/download/ollama-linux-amd64 -o ~/ollama", shell=True)
@@ -17,13 +19,14 @@ def ollama_service_thread():
     process = subprocess.Popen("~/ollama serve", shell=True, preexec_fn=os.setsid)
     process.wait()
 
-OLLAMA_SERVICE_THREAD = threading.Thread(target=ollama_service_thread)
-# OLLAMA_SERVICE_THREAD.start()
-
 def terminate():
+    global process, OLLAMA_SERVICE_THREAD
     if process:
         os.killpg(os.getpgid(process.pid), signal.SIGTERM)
-    OLLAMA_SERVICE_THREAD
+    if OLLAMA_SERVICE_THREAD:
+        OLLAMA_SERVICE_THREAD.join()
+    process = None
+    OLLAMA_SERVICE_THREAD = None
 
 # Uncomment and modify the model to what you want locally
 # model = "moondream"
@@ -65,6 +68,8 @@ h3 {
 INIT_SIGN = ""
 
 def init():
+    global OLLAMA_SERVICE_THREAD
+    OLLAMA_SERVICE_THREAD = threading.Thread(target=ollama_service_thread)
     OLLAMA_SERVICE_THREAD.start()
     print("Giving ollama serve a moment")
     time.sleep(10)
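Taken together, the change stops creating the serve thread at import time: init() now owns thread creation, and terminate() tears the server down and resets the module-level state so init() can be called again. Below is a minimal sketch of the resulting lifecycle, not the full app.py; it assumes ollama_service_thread() declares "global process", a line that sits outside the diff context shown above.

# Minimal lifecycle sketch (not the full app.py). Assumes ollama_service_thread()
# declares "global process"; the diff context does not show that line.
import os
import signal
import subprocess
import threading
import time

OLLAMA = os.path.expanduser("~/ollama")
process = None                # Popen handle for "ollama serve", set by the worker thread
OLLAMA_SERVICE_THREAD = None  # thread that owns the serve process

def ollama_service_thread():
    global process  # assumed; needed so terminate() can see the Popen handle
    process = subprocess.Popen("~/ollama serve", shell=True, preexec_fn=os.setsid)
    process.wait()

def init():
    global OLLAMA_SERVICE_THREAD
    OLLAMA_SERVICE_THREAD = threading.Thread(target=ollama_service_thread)
    OLLAMA_SERVICE_THREAD.start()
    time.sleep(10)  # give "ollama serve" a moment to come up

def terminate():
    global process, OLLAMA_SERVICE_THREAD
    if process:
        # preexec_fn=os.setsid made the serve process a group leader, so a
        # SIGTERM to the group also reaches anything it spawned.
        os.killpg(os.getpgid(process.pid), signal.SIGTERM)
    if OLLAMA_SERVICE_THREAD:
        OLLAMA_SERVICE_THREAD.join()
    process = None
    OLLAMA_SERVICE_THREAD = None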
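For context, a hypothetical call pattern (the try/finally wrapper is illustrative, not part of the commit): bring the server up with init() only when a request actually needs the local model, and always call terminate() afterwards so the serve process and its thread do not leak.

# Hypothetical usage, not from the commit: bracket local-model work with
# init()/terminate() so "ollama serve" only runs while it is needed.
init()
try:
    ...  # query the local ollama server here
finally:
    terminate()  # SIGTERM the process group, join the thread, reset the globals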