Spaces: Running on Zero
Daryl Lim committed · 8c7a7bf
Parent(s): e72dbda
Update app.py

src/app.py CHANGED (+4 -4)
@@ -17,9 +17,9 @@ today_date = datetime.today().strftime("%B %-d, %Y") # noqa: DTZ002
 SYS_PROMPT = f"""Knowledge Cutoff Date: April 2024.
 Today's Date: {today_date}.
 You are Granite, developed by IBM. You are a helpful AI assistant"""
-TITLE = "IBM Granite
+TITLE = "IBM Granite Chatbot"
 DESCRIPTION = """
-<p>Granite 3.
+<p>Granite 3.3 8b instruct is an open-source LLM supporting a 128k context window. Start with one of the sample prompts
 or enter your own. Keep in mind that AI can occasionally make mistakes.
 <span class="gr_docs_link">
 <a href="https://www.ibm.com/granite/docs/">View Documentation <i class="fa fa-external-link"></i></a>
@@ -37,9 +37,9 @@ if not torch.cuda.is_available():
 print("This demo may not work on CPU.")
 
 model = AutoModelForCausalLM.from_pretrained(
-    "ibm-granite/granite-3.
+    "ibm-granite/granite-3.3-8b-instruct", torch_dtype=torch.float16, device_map="auto"
 )
-tokenizer = AutoTokenizer.from_pretrained("ibm-granite/granite-3.
+tokenizer = AutoTokenizer.from_pretrained("ibm-granite/granite-3.3-8b-instruct")
 tokenizer.use_default_system_prompt = False
 
 
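As a usage note, the sketch below shows one way the updated loading code might be exercised end to end. Only the from_pretrained calls, SYS_PROMPT, and tokenizer.use_default_system_prompt = False come from the file shown in this diff; the imports, the example message, and the apply_chat_template/generate call are illustrative assumptions, not part of this commit.

# Minimal usage sketch (assumption, not part of the commit): load the updated
# Granite 3.3 checkpoint as in the diff, then run one chat turn.
from datetime import datetime

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

today_date = datetime.today().strftime("%B %-d, %Y")  # noqa: DTZ002
SYS_PROMPT = f"""Knowledge Cutoff Date: April 2024.
Today's Date: {today_date}.
You are Granite, developed by IBM. You are a helpful AI assistant"""

# These lines mirror the state of src/app.py after this commit.
model = AutoModelForCausalLM.from_pretrained(
    "ibm-granite/granite-3.3-8b-instruct", torch_dtype=torch.float16, device_map="auto"
)
tokenizer = AutoTokenizer.from_pretrained("ibm-granite/granite-3.3-8b-instruct")
tokenizer.use_default_system_prompt = False

# Hypothetical single-turn generation: pair the custom system prompt with a
# user message, apply the model's chat template, and decode only the new tokens.
messages = [
    {"role": "system", "content": SYS_PROMPT},
    {"role": "user", "content": "Summarize what Granite 3.3 8B Instruct is."},
]
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)
output_ids = model.generate(input_ids, max_new_tokens=256, do_sample=False)
print(tokenizer.decode(output_ids[0, input_ids.shape[-1]:], skip_special_tokens=True))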