Update app.py
app.py
CHANGED
@@ -687,30 +687,30 @@ def generate_html(local_files):
 
 
 
-from gradio_client import Client
-client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
-result = client.predict(
-message="Hello!!",
-llm_results_use=5,
-database_choice="Semantic Search",
-llm_model_picked="mistralai/Mistral-7B-Instruct-v0.2",
-api_name="/update_with_rag_md"
-)
-print(result)
-Accepts 4 parameters:
-message str Required
-The input value that is provided in the "Search" Textbox component.
-llm_results_use float Default: 5
-The input value that is provided in the "Top n results as context" Slider component.
-database_choice Literal['Semantic Search', 'Arxiv Search - Latest - (EXPERIMENTAL)'] Default: "Semantic Search"
-The input value that is provided in the "Search Source" Dropdown component.
-llm_model_picked Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] Default: "mistralai/Mistral-7B-Instruct-v0.2"
-The input value that is provided in the "LLM Model" Dropdown component.
-Returns tuple of 2 elements
-[0] str
-The output value that appears in the "value_14" Markdown component.
-[1] str
-The output value that appears in the "value_13" Textbox component.
+#from gradio_client import Client
+#client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
+#result = client.predict(
+# message="Hello!!",
+# llm_results_use=5,
+# database_choice="Semantic Search",
+# llm_model_picked="mistralai/Mistral-7B-Instruct-v0.2",
+# api_name="/update_with_rag_md"
+#)
+#print(result)
+#Accepts 4 parameters:
+#message str Required
+##The input value that is provided in the "Search" Textbox component.
+#llm_results_use float Default: 5
+#The input value that is provided in the "Top n results as context" Slider component.
+#database_choice Literal['Semantic Search', 'Arxiv Search - Latest - (EXPERIMENTAL)'] Default: "Semantic Search"
+#The input value that is provided in the "Search Source" Dropdown component.
+#llm_model_picked Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] Default: "mistralai/Mistral-7B-Instruct-v0.2"
+#The input value that is provided in the "LLM Model" Dropdown component.
+#Returns tuple of 2 elements
+#[0] str
+#The output value that appears in the "value_14" Markdown component.
+#[1] str
+#The output value that appears in the "value_13" Textbox component.
 
 
 
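For anyone who still wants to exercise the endpoint that this commit comments out, below is a minimal standalone sketch reconstructed from the snippet above. It assumes the awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern Space is reachable and still exposes the /update_with_rag_md API with the same four parameters; the variable names markdown_answer and raw_text are illustrative, not part of the original code.

# Minimal sketch (assumption: the Space and its /update_with_rag_md endpoint are still live).
from gradio_client import Client

client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
result = client.predict(
    message="Hello!!",                                       # "Search" Textbox
    llm_results_use=5,                                       # "Top n results as context" Slider
    database_choice="Semantic Search",                       # "Search Source" Dropdown
    llm_model_picked="mistralai/Mistral-7B-Instruct-v0.2",   # "LLM Model" Dropdown
    api_name="/update_with_rag_md",
)

# The endpoint returns a tuple of two strings:
# [0] Markdown component "value_14", [1] Textbox component "value_13".
markdown_answer, raw_text = result
print(markdown_answer)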