davanstrien HF Staff commited on
Commit
a0384f7
·
verified ·
1 Parent(s): da5ada1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -14
app.py CHANGED
@@ -94,42 +94,39 @@ def generate_summary(card_text: str, card_type: str) -> str:
94
  """Cached wrapper for generate_summary with TTL."""
95
  return _generate_summary_gpu(card_text, card_type)
96
 
97
def summarize(hub_id: str = "", card_type: str = "model") -> Tuple[str, str]:
    """Interface function for Gradio. Returns both text and JSON formats.

    Args:
        hub_id: Hugging Face Hub repo id, e.g. "huggingface/llama-7b".
        card_type: Expected card type ("model" or "dataset"); validated
            against the type inferred from the Hub.

    Returns:
        A (plain_text, json_string) pair — on success the summary and its
        JSON envelope, on failure an error message in both formats.
    """
    import json  # stdlib; local import keeps this block self-contained

    try:
        if hub_id:
            # Fetch and validate card type
            inferred_type, card_text = get_card_info(hub_id)
            if card_type and card_type != inferred_type:
                error_msg = f"Error: Provided card_type '{card_type}' doesn't match inferred type '{inferred_type}'"
                # json.dumps escapes quotes/backslashes — the original
                # f-string produced invalid JSON whenever they appeared.
                return error_msg, json.dumps({"error": error_msg})
            card_type = inferred_type
        else:
            error_msg = "Error: Hub ID must be provided"
            return error_msg, json.dumps({"error": error_msg})

        # Use the cached wrapper
        summary = generate_summary(card_text, card_type)
        # Build the JSON with the json module so quotes/newlines in the
        # model-generated summary cannot break the output format.
        json_output = json.dumps({"summary": summary, "type": card_type, "hub_id": hub_id})
        return summary, json_output

    except Exception as e:
        error_msg = str(e)
        return f"Error: {error_msg}", json.dumps({"error": error_msg})
119
 
120
def create_interface():
    """Build and return the Gradio interface for the summarizer."""
    # Two outputs: the human-readable summary plus its JSON envelope.
    return gr.Interface(
        fn=summarize,
        inputs=[
            gr.Textbox(label="Hub ID", placeholder="e.g., huggingface/llama-7b"),
            gr.Radio(choices=["model", "dataset"], label="Card Type", value="model"),
        ],
        outputs=[
            gr.Textbox(label="Summary"),
            gr.JSON(label="JSON Output"),
        ],
        title="Hugging Face Hub TLDR Generator",
        description="Generate concise summaries of model and dataset cards from the Hugging Face Hub.",
    )
135
 
 
94
  """Cached wrapper for generate_summary with TTL."""
95
  return _generate_summary_gpu(card_text, card_type)
96
 
97
def summarize(hub_id: str = "", card_type: str = None) -> str:
    """Interface function for Gradio. Returns JSON format.

    Args:
        hub_id: Hugging Face Hub repo id, e.g. "huggingface/llama-7b".
        card_type: Optional "model" or "dataset". When omitted the type
            is inferred from the Hub; when given it is validated against
            the inferred type.

    Returns:
        A JSON string containing either a "summary"/"type"/"hub_id"
        payload or an "error" message.
    """
    import json  # stdlib; local import keeps this block self-contained

    try:
        if hub_id:
            # Fetch and infer card type
            inferred_type, card_text = get_card_info(hub_id)
            # Only validate if card_type was explicitly provided
            if card_type and card_type != inferred_type:
                error_msg = f"Error: Provided card_type '{card_type}' doesn't match inferred type '{inferred_type}'"
                # json.dumps escapes quotes/backslashes — the original
                # f-string produced invalid JSON whenever they appeared.
                return json.dumps({"error": error_msg})
            card_type = inferred_type
        else:
            error_msg = "Error: Hub ID must be provided"
            return json.dumps({"error": error_msg})

        # Use the cached wrapper
        summary = generate_summary(card_text, card_type)
        # Serialize with the json module so quotes/newlines in the
        # model-generated summary cannot break the output format.
        return json.dumps({"summary": summary, "type": card_type, "hub_id": hub_id})

    except Exception as e:
        return json.dumps({"error": str(e)})
119
 
120
def create_interface():
    """Build and return the Gradio interface for the summarizer."""
    # Single JSON output; card type is optional and auto-detected when None.
    return gr.Interface(
        fn=summarize,
        inputs=[
            gr.Textbox(label="Hub ID", placeholder="e.g., huggingface/llama-7b"),
            gr.Radio(choices=["model", "dataset", None], label="Card Type (optional)", value=None),
        ],
        outputs=gr.JSON(label="Output"),
        title="Hugging Face Hub TLDR Generator",
        description="Generate concise summaries of model and dataset cards from the Hugging Face Hub. Leave card type empty for automatic detection.",
    )
132