chansung committed
Commit aeabaec · 1 Parent(s): 16d0614

Update app.py

Files changed (1):
  1. app.py  +10 -6
app.py CHANGED
@@ -5,6 +5,10 @@ import requests
 import gradio as gr
 
 STYLE = """
+.no-border {
+  border: none;
+}
+
 .group-border {
   padding: 10px;
   border-width: 1px;
@@ -184,7 +188,7 @@ def submit(
         return f"something went wrong {response.status_code} = {response.text}"
 
 with gr.Blocks(css=STYLE) as hf_endpoint:
-    with gr.Tab("🤗 Inference Endpoint"):
+    with gr.Tab("🤗 Inference Endpoint", elem_classes=["no-border"]):
         gr.Markdown("# Deploy LLM on 🤗 Hugging Face Inference Endpoint", elem_classes=["center"])
 
         with gr.Column(elem_classes=["group-border"]):
@@ -306,7 +310,7 @@ Name for your new endpoint""")
             )
 
         with gr.Column(elem_classes=["group-border"]):
-            with gr.Accordion("Serving Container", open=False):
+            with gr.Accordion("Serving Container", open=False, elem_classes=["no-border"]):
                 with gr.Column():
                     gr.Markdown("""### Container Type
 
@@ -415,16 +419,16 @@ Name for your new endpoint""")
                 security_selector],
        outputs=status_txt)
 
-    with gr.Tab("AWS"):
+    with gr.Tab("AWS", elem_classes=["no-border"]):
         gr.Markdown("# Deploy LLM on 🤗 Hugging Face Inference Endpoint", elem_classes=["center"])
 
-    with gr.Tab("GCP"):
+    with gr.Tab("GCP", elem_classes=["no-border"]):
         gr.Markdown("# Deploy LLM on 🤗 Hugging Face Inference Endpoint", elem_classes=["center"])
 
-    with gr.Tab("Azure"):
+    with gr.Tab("Azure", elem_classes=["no-border"]):
         gr.Markdown("# Deploy LLM on 🤗 Hugging Face Inference Endpoint", elem_classes=["center"])
 
-    with gr.Tab("Lambdalabs"):
+    with gr.Tab("Lambdalabs", elem_classes=["no-border"]):
         gr.Markdown("# Deploy LLM on 🤗 Hugging Face Inference Endpoint", elem_classes=["center"])
 
 hf_endpoint.launch(enable_queue=True, debug=True)
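
In short, the commit adds a `.no-border` rule to the `STYLE` CSS string and attaches it to the tabs and the "Serving Container" accordion via `elem_classes`. The snippet below is a minimal, self-contained sketch of that pattern, not the full app: the `.center` rule and the placeholder markdown inside the accordion are assumptions added for illustration, while `gr.Blocks(css=...)` and `elem_classes` are the same mechanisms used in the diff above.

```python
import gradio as gr

# Custom CSS shipped with the Blocks app; classes are referenced via elem_classes.
STYLE = """
.no-border {
  border: none;
}
.group-border {
  padding: 10px;
  border-width: 1px;
}
.center {
  text-align: center;  /* assumed rule; the real .center definition is not shown in this diff */
}
"""

with gr.Blocks(css=STYLE) as demo:
    # elem_classes adds the class to the component's wrapper element,
    # so the .no-border rule strips the default tab border.
    with gr.Tab("🤗 Inference Endpoint", elem_classes=["no-border"]):
        gr.Markdown("# Deploy LLM on 🤗 Hugging Face Inference Endpoint",
                    elem_classes=["center"])
        with gr.Column(elem_classes=["group-border"]):
            # The accordion reuses the same class to drop its border as well.
            with gr.Accordion("Serving Container", open=False,
                              elem_classes=["no-border"]):
                gr.Markdown("Placeholder content for the accordion.")

if __name__ == "__main__":
    demo.launch()
```

Keeping the styling in a single CSS string and targeting components by class keeps the layout code free of inline style tweaks; any component that should lose its border simply opts into `no-border` through `elem_classes`.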