Anupam007 committed on
Commit ae37e96 · verified
1 Parent(s): 36bb623

Update app.py

Files changed (1)
  1. app.py +234 -259
app.py CHANGED
@@ -1,328 +1,305 @@
- # VisualMind App - Fixed Version with Better Error Handling
  # Install required libraries
- #!pip install gradio>=3.50.2 transformers>=4.30.0 networkx>=3.0 matplotlib>=3.7.0 torch>=2.0.0

  import gradio as gr
  import torch
  import networkx as nx
- import matplotlib.pyplot as plt
  from transformers import pipeline
  import numpy as np
- import io
- from PIL import Image
- import time
  import traceback

- # Check if GPU is available and print device info
  device = "cuda" if torch.cuda.is_available() else "cpu"
  print(f"Using device: {device}")

- # Load simpler text generation model for faster processing
  try:
-     summarizer = pipeline("summarization", model="facebook/bart-large-cnn", device=0 if device=="cuda" else -1)
-     text_generator = pipeline("text-generation", model="gpt2", device=0 if device=="cuda" else -1)
      print("✓ Models loaded successfully")
  except Exception as e:
      print(f"Model loading error: {e}")
-     # Fallback to smaller models
      try:
-         summarizer = pipeline("summarization", model="sshleifer/distilbart-cnn-6-6", device=0 if device=="cuda" else -1)
-         text_generator = pipeline("text-generation", model="distilgpt2", device=0 if device=="cuda" else -1)
          print("✓ Fallback models loaded successfully")
      except:
-         print("Critical error loading models. Please check your environment.")

- # Add progress tracking
  progress_status = {"value": 0, "desc": ""}

  def update_progress(value, desc=""):
-     """Update progress tracking"""
      progress_status["value"] = value
      progress_status["desc"] = desc
      print(f"Progress: {value}% - {desc}")

- # Function to generate summary (with error handling)
  def generate_summary(topic):
-     """Generate a textual summary based on the input topic"""
      try:
-         update_progress(10, "Starting summary generation")
-
-         # Create a prompt about the topic
-         prompt = f"""
-         Provide a comprehensive analysis of the startup idea: {topic}
-
-         Include the following sections:
-         - Core concept and value proposition
-         - Target market and potential customers
-         - Technology and implementation requirements
-         - Business model and revenue streams
-         - Competitive advantages and market positioning
-         - Potential challenges and mitigation strategies
-         """
-
-         # For startup ideas, generate more custom content rather than summarizing
-         # This simulates what the T5 model would do but with better error handling
-         update_progress(30, "Processing topic information")

          if "AI" in topic or "artificial intelligence" in topic.lower():
-             ai_component = "The artificial intelligence component provides significant competitive advantage through data analysis and automation."
          else:
-             ai_component = "Adding AI capabilities could enhance this startup idea through data analysis and process automation."

-         # Generate the summary with custom components for startup ideas
          if "urban farming" in topic.lower() or "agriculture" in topic.lower():
              summary = f"""
-             {topic} represents an innovative approach to addressing food security and sustainability challenges.
-
-             Core concept: This startup combines advanced technologies with urban agriculture to enable efficient food production in cities. The solution uses sensor networks and intelligent systems to monitor and optimize growing conditions in vertical farms, rooftop gardens, or indoor growing facilities.
-
-             Target market: Primary customers include restaurants seeking farm-to-table produce, health-conscious urban consumers, grocery chains, and food service companies looking to reduce supply chain costs.
-
-             Technology requirements: The implementation requires IoT sensor arrays, climate control systems, a central AI management platform, and mobile applications for monitoring. {ai_component}
-
-             Business model: Revenue streams include B2B installations for commercial clients, subscription services for consumers, technology licensing, and potentially data monetization from agricultural insights.
-
-             Competitive advantages: The solution offers fresher produce with lower transportation costs, reduced water usage (up to 95% compared to traditional farming), and year-round growing capabilities regardless of external climate conditions.
-
-             Challenges include high initial capital costs, technical complexity, regulatory hurdles in some urban areas, and the need to demonstrate ROI to potential customers.
              """
          else:
-             # Generic startup analysis if not urban farming/agriculture
              outputs = text_generator(
-                 f"A startup idea focused on {topic} would involve the following elements:",
-                 max_length=500,
                  num_return_sequences=1,
                  temperature=0.7
              )
-
              generated_text = outputs[0]['generated_text']
-             # Clean up and structure the output
              summary = f"""
-             {topic} represents an innovative startup opportunity with significant market potential.
-
-             Core concept: This startup addresses [specific problem/need] by providing [solution approach] through innovative technology and business model design.

              {generated_text}
-
-             {ai_component}
-
-             Major challenges would include initial funding requirements, building the right team with domain expertise, and establishing market traction against existing competitors.
              """

-         update_progress(60, "Summary generated")
          return summary

      except Exception as e:
-         error_msg = f"Error generating summary: {str(e)}\n{traceback.format_exc()}"
-         print(error_msg)
-         return f"Unable to generate summary. Error: {str(e)}\n\nPlease try again with a different topic or check your connection."

- # Function to generate mind map
  def generate_mindmap(topic, summary):
-     """Generate a mind map based on the topic and summary"""
      try:
-         update_progress(70, "Creating mind map")

-         # Extract concepts from summary (simplified for reliability)
          concepts = [
-             "Core Technology",
-             "Market Opportunity",
-             "Business Model",
-             "Competitive Advantage",
-             "Implementation Challenges"
          ]
-
-         # For each concept, create subconcepts
          subconcepts = {
-             "Core Technology": ["Key Components", "Technical Requirements", "Development Roadmap"],
-             "Market Opportunity": ["Target Customers", "Market Size", "Growth Potential"],
-             "Business Model": ["Revenue Streams", "Pricing Strategy", "Partnership Opportunities"],
-             "Competitive Advantage": ["Unique Selling Points", "Barriers to Entry", "IP Protection"],
-             "Implementation Challenges": ["Resource Requirements", "Regulatory Concerns", "Timeline"]
          }

-         # Create a graph
-         G = nx.Graph()
-
-         # Add the central node
-         G.add_node(topic, size=2000)
-
-         # Add concept nodes and connect to central node
-         for concept in concepts:
-             G.add_node(concept, size=1000)
-             G.add_edge(topic, concept, weight=2)
-
-             # Add subconcept nodes and connect to concept nodes
-             for subconcept in subconcepts[concept]:
-                 G.add_node(subconcept, size=500)
-                 G.add_edge(concept, subconcept, weight=1)
-
-         update_progress(80, "Designing visualization")
-
-         # Create a visually appealing mind map
-         plt.figure(figsize=(14, 10))
-
-         # Use a more stable layout algorithm
-         pos = nx.spring_layout(G, k=0.5, seed=42)
-
-         # Draw nodes with different sizes and colors
-         node_sizes = [G.nodes[node].get('size', 300) for node in G.nodes()]
-
-         # Color scheme: main topic, concepts, subconcepts
-         node_colors = []
-         for node in G.nodes():
-             if node == topic:
-                 node_colors.append('#FF9999')  # Red for main topic
-             elif node in concepts:
-                 node_colors.append('#99CCFF')  # Blue for main concepts
-             else:
-                 node_colors.append('#CCFF99')  # Green for subconcepts
-
-         # Draw the network elements
-         nx.draw_networkx_nodes(G, pos, node_size=node_sizes, node_color=node_colors, alpha=0.8)
-         nx.draw_networkx_edges(G, pos, width=1.5, alpha=0.5, edge_color='gray')
-
-         # Custom node labels with different font sizes
-         for node, (x, y) in pos.items():
-             font_size = 15 if node == topic else 12 if node in concepts else 9
-             plt.text(
-                 x, y, node,
-                 fontsize=font_size,
-                 ha='center', va='center',
-                 bbox=dict(boxstyle="round,pad=0.3", fc='white', ec='gray', alpha=0.8)
-             )
-
-         plt.title(f"Mind Map: {topic}", fontsize=16)
-         plt.axis('off')
-
-         # Save figure to bytes
-         buf = io.BytesIO()
-         plt.savefig(buf, format='png', dpi=150, bbox_inches='tight')
-         buf.seek(0)
-
-         # Convert to base64 for displaying in gradio
-         mindmap_img = Image.open(buf)
-         plt.close()

-         update_progress(100, "Mind map completed")
-         return mindmap_img

      except Exception as e:
-         error_msg = f"Error generating mind map: {str(e)}\n{traceback.format_exc()}"
-         print(error_msg)
-
-         # Create a simple error image
-         plt.figure(figsize=(10, 6))
-         plt.text(0.5, 0.5, f"Mind map generation error:\n{str(e)}",
-                  ha='center', va='center', fontsize=12, color='red',
-                  bbox=dict(facecolor='white', alpha=0.8))
-         plt.axis('off')
-
-         # Save error image
-         buf = io.BytesIO()
-         plt.savefig(buf, format='png', dpi=100)
-         buf.seek(0)
-         error_img = Image.open(buf)
-         plt.close()
-
-         return error_img

- # Main processing function
  def process_input(topic):
-     """Process the main topic input"""
      try:
          if not topic or len(topic.strip()) < 3:
-             return "Please enter a valid topic (at least 3 characters).", None, gr.update(visible=False)
-
-         print(f"Processing topic: {topic}")
-         update_progress(0, "Starting processing")

-         # Generate summary
-         summary = generate_summary(topic)

-         # Generate mind map
          mindmap = generate_mindmap(topic, summary)

          return summary, mindmap, gr.update(visible=True)

      except Exception as e:
-         error_msg = f"Error in main processing: {str(e)}\n{traceback.format_exc()}"
-         print(error_msg)
-         return f"An error occurred: {str(e)}", None, gr.update(visible=True)

- # Function to process follow-up questions
  def process_followup_question(question, current_summary):
-     """Process a follow-up question"""
      try:
          if not question or len(question.strip()) < 5:
-             return "Please enter a valid question (at least 5 characters).", None

-         print(f"Processing follow-up question: {question}")
-
-         # Generate answer using the text generator
          prompt = f"Question: {question}\n\nContext: {current_summary}\n\nAnswer:"

-         response = text_generator(
-             prompt,
-             max_length=300,
-             num_return_sequences=1,
-             temperature=0.7
-         )
-
-         # Extract and clean up the generated text
-         answer_text = response[0]['generated_text']
-         # Find where the answer starts
-         answer_start = answer_text.find("Answer:") + 7 if "Answer:" in answer_text else 0
-         answer = answer_text[answer_start:].strip()

-         # For simplicity in this fixed version, we'll return the same mindmap
-         return answer, None

      except Exception as e:
-         error_msg = f"Error processing follow-up: {str(e)}\n{traceback.format_exc()}"
-         print(error_msg)
-         return f"An error occurred while processing your question: {str(e)}", None

- # Create the Gradio interface
- with gr.Blocks(theme=gr.themes.Soft()) as demo:
      current_summary = gr.State("")
-
-     gr.Markdown("# 🧠 VisualMind Startup Idea Generator")
-     gr.Markdown("Enter a startup idea topic to generate a comprehensive analysis and mind map.")
-
-     with gr.Row():
-         with gr.Column(scale=1):
-             topic_input = gr.Textbox(
-                 label="Enter Startup Topic",
-                 placeholder="e.g., AI-Powered Urban Farming, Blockchain for Supply Chain, Virtual Reality Education"
-             )
-             submit_btn = gr.Button("Generate Analysis", variant="primary")
-
-     # Add a visible progress bar
-     with gr.Row():
-         progress = gr.Textbox(label="Status", value="Ready to generate", interactive=False)
-
-     with gr.Row(visible=False) as results_container:
-         with gr.Column(scale=1):
-             summary_output = gr.Textbox(label="Startup Analysis", lines=15)

-             with gr.Accordion("Ask Follow-up Questions", open=False):
-                 followup_input = gr.Textbox(
-                     label="Ask a question about this startup idea",
-                     placeholder="e.g., What would be the initial funding requirements?"
-                 )
-                 followup_btn = gr.Button("Get Answer")
-                 followup_output = gr.Textbox(label="Answer", lines=5)
-
-         with gr.Column(scale=1):
-             mindmap_output = gr.Image(label="Mind Map", type="pil")
-
-     # Set up event handlers
-     def update_status():
-         return f"{progress_status['value']}% - {progress_status['desc']}"
-
      submit_btn.click(
-         fn=lambda: "Processing your startup topic...",
          inputs=None,
          outputs=progress
      ).then(
@@ -334,13 +311,13 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
          inputs=[summary_output],
          outputs=[current_summary]
      ).then(
-         fn=lambda: "Ready for follow-up questions",
          inputs=None,
          outputs=progress
      )

      followup_btn.click(
-         fn=lambda: "Processing your question...",
          inputs=None,
          outputs=progress
      ).then(
@@ -348,32 +325,30 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
          inputs=[followup_input, current_summary],
          outputs=[followup_output, mindmap_output]
      ).then(
-         fn=lambda: "Follow-up processed",
          inputs=None,
          outputs=progress
      )

-     gr.Markdown("### How It Works")
-     gr.Markdown("""
-     1. Enter a startup idea topic in the input field and click 'Generate Analysis'
-     2. The AI will analyze your startup concept and generate a comprehensive breakdown
-     3. A mind map will be created showing key aspects of the business idea
-     4. Ask follow-up questions to explore specific aspects of the startup concept

-     Note: This tool is for brainstorming and ideation purposes. Real startup validation requires market research and testing.
-     """)

- # Launch the app with better error handling
  try:
-     demo.launch(debug=True, share=True)
  except Exception as e:
-     print(f"Error launching Gradio app: {e}")
-     print("Trying alternative launch method...")
      try:
-         demo.launch(debug=True, server_name="0.0.0.0")
      except Exception as e2:
-         print(f"Second launch attempt failed: {e2}")
-         print("\nTROUBLESHOOTING TIPS:")
-         print("1. Make sure you're running this in Google Colab")
-         print("2. Try Runtime > Restart runtime and run again")
-         print("3. Check that all libraries installed correctly")

  # Install required libraries
+ # !pip install gradio>=3.50.2 transformers>=4.30.0 networkx>=3.0 plotly>=5.0.0 torch>=2.0.0 numpy>=1.21.0

  import gradio as gr
  import torch
  import networkx as nx
+ import plotly.graph_objects as go
  from transformers import pipeline
  import numpy as np
  import traceback

+ # Check device
  device = "cuda" if torch.cuda.is_available() else "cpu"
  print(f"Using device: {device}")

+ # Load models with fallback
  try:
+     summarizer = pipeline("summarization", model="facebook/bart-large-cnn", device=0 if device == "cuda" else -1)
+     text_generator = pipeline("text-generation", model="gpt2", device=0 if device == "cuda" else -1)
      print("✓ Models loaded successfully")
  except Exception as e:
      print(f"Model loading error: {e}")
      try:
+         summarizer = pipeline("summarization", model="sshleifer/distilbart-cnn-6-6", device=0 if device == "cuda" else -1)
+         text_generator = pipeline("text-generation", model="distilgpt2", device=0 if device == "cuda" else -1)
          print("✓ Fallback models loaded successfully")
      except:
+         print("Critical error loading models. Exiting.")

+ # Progress tracking
  progress_status = {"value": 0, "desc": ""}

  def update_progress(value, desc=""):
      progress_status["value"] = value
      progress_status["desc"] = desc
      print(f"Progress: {value}% - {desc}")

+ # Simplified summary generation
  def generate_summary(topic):
      try:
+         update_progress(10, "Starting your idea analysis")

          if "AI" in topic or "artificial intelligence" in topic.lower():
+             ai_component = "The AI part makes it super smart by figuring out patterns and doing tasks automatically!"
          else:
+             ai_component = "Adding some AI could make it even cooler by helping with data or tasks."

          if "urban farming" in topic.lower() or "agriculture" in topic.lower():
              summary = f"""
+             Your idea, {topic}, is a fresh way to grow food right in cities!
+
+             What it’s about: This startup uses cool tech to grow fresh food indoors or on rooftops. Think sensors and smart systems to keep plants happy.
+
+             Who it’s for: People like restaurants wanting fresh veggies, city folks who love healthy food, or stores looking to cut delivery costs.
+
+             How it works: You’d need things like sensors, grow lights, and a smart app to run it all. {ai_component}
+
+             How it makes money: Sell to businesses, charge people monthly for fresh food, or share your tech with others.
+
+             Why it stands out: Fresher food, less travel, and it works all year—no matter the weather!
+
+             Things to watch out for: It might cost a lot to start, and some cities have rules about this.
              """
          else:
              outputs = text_generator(
+                 f"A startup idea about {topic} could be:",
+                 max_length=300,
                  num_return_sequences=1,
                  temperature=0.7
              )
              generated_text = outputs[0]['generated_text']
              summary = f"""
+             Your idea, {topic}, is a cool way to make something better!
+
+             What it’s about: This startup helps with [problem/need] by using [simple solution]. It’s special because [unique bit].
+
+             Who it’s for: People like [customer type] would love this—think [example customers].
+
+             How it works: You’d need [basic tools/tech] to build it. {ai_component}
+
+             How it makes money: You could [simple revenue idea, e.g., sell it or charge a fee].
+
+             Why it stands out: It’s better because [easy advantage, e.g., it’s simpler or cheaper].
+
+             Things to watch out for: Starting might be tough because of [simple challenge, e.g., money or time].

              {generated_text}
              """

+         update_progress(60, "Idea analysis ready")
          return summary

      except Exception as e:
+         print(f"Error generating summary: {str(e)}\n{traceback.format_exc()}")
+         return f"Oops! Something went wrong: {str(e)}. Try again with a new idea!"

+ # Responsive interactive mind map with Plotly
  def generate_mindmap(topic, summary):
      try:
+         update_progress(70, "Building your idea map")

          concepts = [
+             "What It Does", "Who It’s For", "How It Makes Money",
+             "What Makes It Special", "What’s Needed", "Next Steps"
          ]
          subconcepts = {
+             "What It Does": ["Main Idea", "Key Features"],
+             "Who It’s For": ["Customers", "Why They’d Like It"],
+             "How It Makes Money": ["Ways to Earn", "Pricing"],
+             "What Makes It Special": ["Standout Points", "Edge Over Others"],
+             "What’s Needed": ["Tools or Tech", "Skills"],
+             "Next Steps": ["First Moves", "Goals"]
          }

+         nodes = [topic] + concepts + [sub for concept in concepts for sub in subconcepts[concept]]
+         edges = [(topic, concept) for concept in concepts] + \
+                 [(concept, sub) for concept in concepts for sub in subconcepts[concept]]
+
+         pos = {topic: (0, 0)}
+         for i, concept in enumerate(concepts):
+             pos[concept] = (np.cos(i * 2 * np.pi / len(concepts)), np.sin(i * 2 * np.pi / len(concepts)))
+             for j, sub in enumerate(subconcepts[concept]):
+                 pos[sub] = (pos[concept][0] + 0.3 * np.cos(j), pos[concept][1] + 0.3 * np.sin(j))
+
+         edge_x, edge_y = [], []
+         for edge in edges:
+             x0, y0 = pos[edge[0]]
+             x1, y1 = pos[edge[1]]
+             edge_x.extend([x0, x1, None])
+             edge_y.extend([y0, y1, None])
+
+         node_x, node_y = [pos[node][0] for node in nodes], [pos[node][1] for node in nodes]
+
+         fig = go.Figure()
+         fig.add_trace(go.Scatter(x=edge_x, y=edge_y, mode='lines', line=dict(color='gray', width=1), hoverinfo='none'))
+         fig.add_trace(go.Scatter(
+             x=node_x, y=node_y, mode='markers+text', text=nodes, textposition="middle center",
+             marker=dict(size=[30 if n == topic else 20 if n in concepts else 15 for n in nodes],
+                         color=['#FF9999' if n == topic else '#99CCFF' if n in concepts else '#CCFF99' for n in nodes]),
+             hoverinfo='text'
+         ))
+
+         # Responsive layout for mind map
+         fig.update_layout(
+             showlegend=False,
+             title=f"Your Idea Map: {topic}",
+             title_x=0.5,
+             paper_bgcolor='white',
+             plot_bgcolor='white',
+             autosize=True,  # Makes it scale with container
+             margin=dict(l=20, r=20, t=40, b=20)  # Reduced margins for mobile
+         )

+         update_progress(100, "Idea map ready")
+         return fig

      except Exception as e:
+         print(f"Error generating mind map: {str(e)}")
+         return None

+ # Main processing
  def process_input(topic):
      try:
          if not topic or len(topic.strip()) < 3:
+             return "Please enter a real idea (3+ characters)!", None, gr.update(visible=False)

+         print(f"Processing: {topic}")
+         update_progress(0, "Starting your idea journey")

+         summary = f"Congrats! You’ve started shaping {topic}. Here’s your plan:\n\n{generate_summary(topic)}"
          mindmap = generate_mindmap(topic, summary)

          return summary, mindmap, gr.update(visible=True)

      except Exception as e:
+         print(f"Error in processing: {str(e)}\n{traceback.format_exc()}")
+         return f"Oops! Something broke: {str(e)}", None, gr.update(visible=True)

+ # Follow-up question processing
  def process_followup_question(question, current_summary):
      try:
          if not question or len(question.strip()) < 5:
+             return "Ask something longer (5+ characters)!", None

+         print(f"Follow-up: {question}")
          prompt = f"Question: {question}\n\nContext: {current_summary}\n\nAnswer:"

+         response = text_generator(prompt, max_length=300, num_return_sequences=1, temperature=0.7)
+         answer = response[0]['generated_text'].split("Answer:")[-1].strip()

+         return f"Here’s your answer:\n\n{answer}", None

      except Exception as e:
+         print(f"Error in follow-up: {str(e)}\n{traceback.format_exc()}")
+         return f"Sorry, something went wrong: {str(e)}", None
+
+ # Save and load ideas
+ def save_idea(name, summary, mindmap, saved):
+     if not name:
+         return gr.update(), "Please give your idea a name!"
+     saved[name] = {"summary": summary, "mindmap": mindmap}
+     return gr.update(choices=list(saved.keys())), f"Great job! '{name}' is saved—keep refining it!"
+
+ def load_idea(name, saved):
+     if name in saved:
+         return saved[name]["summary"], saved[name]["mindmap"], gr.update(visible=True)
+     return "No idea found with that name.", None, gr.update(visible=False)
+
+ # Custom CSS for responsiveness
+ custom_css = """
+ .container {
+     max-width: 100%;
+     padding: 10px;
+ }
+ .header {
+     font-size: clamp(1.5rem, 4vw, 2.5rem);
+     text-align: center;
+ }
+ .textbox {
+     width: 100% !important;
+     min-height: 100px;
+ }
+ .button {
+     width: 100%;
+     margin: 5px 0;
+ }
+ .plot {
+     width: 100% !important;
+     height: auto !important;
+ }
+ @media (max-width: 768px) {
+     .gr-row {
+         flex-direction: column !important;
+     }
+     .gr-column {
+         width: 100% !important;
+     }
+ }
+ """

+ # Gradio interface
+ with gr.Blocks(theme=gr.themes.Glass(primary_hue="blue", secondary_hue="green"), css=custom_css) as demo:
      current_summary = gr.State("")
+     saved_ideas = gr.State({})
+
+     gr.Markdown("# 🧠 VisualMind - Grow Your Startup Idea", elem_classes=["header"])
+     gr.Markdown("Dream up a startup idea and watch it come to life—works great on any screen!", elem_classes=["container"])
+
+     with gr.Tabs():
+         with gr.Tab("Create Your Idea"):
+             with gr.Row(equal_height=False):
+                 with gr.Column(scale=1, min_width=300):
+                     topic_input = gr.Textbox(
+                         label="Your Startup Idea",
+                         placeholder="e.g., Smart Urban Gardens",
+                         elem_classes=["textbox"]
+                     )
+                     submit_btn = gr.Button("Build My Idea", variant="primary", elem_classes=["button"])

+                     progress = gr.Textbox(label="What’s Happening", value="Ready to start!", interactive=False, elem_classes=["textbox"])
+
+             with gr.Row(visible=False, equal_height=False) as results_container:
+                 with gr.Column(scale=1, min_width=300):
+                     summary_output = gr.Textbox(label="Your Idea Plan", lines=10, elem_classes=["textbox"])
+
+                     with gr.Accordion("Explore More", open=False):
+                         followup_input = gr.Textbox(
+                             label="Got a question?",
+                             placeholder="e.g., How do I start small?",
+                             elem_classes=["textbox"]
+                         )
+                         followup_btn = gr.Button("Ask Away", elem_classes=["button"])
+                         followup_output = gr.Textbox(label="Here’s What We Think", lines=5, elem_classes=["textbox"])
+                         gr.Markdown("""
+                         ### Need Ideas to Ask?
+                         - How can I make this stand out?
+                         - Who might love this?
+                         - What’s my first step?
+                         """)
+
+                 with gr.Column(scale=1, min_width=300):
+                     mindmap_output = gr.Plot(label="Your Interactive Idea Map", elem_classes=["plot"])
+
+             with gr.Row(equal_height=False):
+                 with gr.Column(scale=1, min_width=150):
+                     idea_name = gr.Textbox(label="Name Your Idea", placeholder="e.g., GreenCity", elem_classes=["textbox"])
+                     save_btn = gr.Button("Save Idea", elem_classes=["button"])
+                 with gr.Column(scale=1, min_width=150):
+                     load_dropdown = gr.Dropdown(label="Pick a Saved Idea", choices=[], elem_classes=["textbox"])
+                     load_btn = gr.Button("Load Idea", elem_classes=["button"])
+
+         with gr.Tab("Tips & Inspiration"):
+             gr.Markdown("""
+             ### Spark Your Next Big Thing!
+             - **Example:** *Eco-Friendly Packaging* - Uses plant-based materials and smart tech to cut waste.
+             - **Try This:** Think of something you wish worked better—how could an app or gadget fix it?
+             - **Tip:** Start small—test your idea with friends first!
+             """, elem_classes=["container"])
+
+     # Event handlers
      submit_btn.click(
+         fn=lambda: "Bringing your idea to life...",
          inputs=None,
          outputs=progress
      ).then(
          inputs=[summary_output],
          outputs=[current_summary]
      ).then(
+         fn=lambda: "Ready for more questions—keep exploring!",
          inputs=None,
          outputs=progress
      )

      followup_btn.click(
+         fn=lambda: "Thinking about your question...",
          inputs=None,
          outputs=progress
      ).then(
          inputs=[followup_input, current_summary],
          outputs=[followup_output, mindmap_output]
      ).then(
+         fn=lambda: "All set—ask another if you’d like!",
          inputs=None,
          outputs=progress
      )

+     save_btn.click(
+         save_idea,
+         inputs=[idea_name, summary_output, mindmap_output, saved_ideas],
+         outputs=[load_dropdown, progress]
+     )

+     load_btn.click(
+         load_idea,
+         inputs=[load_dropdown, saved_ideas],
+         outputs=[summary_output, mindmap_output, results_container]
+     )

+ # Launch with error handling
  try:
+     demo.launch(debug=True, share=True, height=800)  # Set a default height for better mobile scrolling
  except Exception as e:
+     print(f"Launch error: {e}")
      try:
+         demo.launch(debug=True, server_name="0.0.0.0", height=800)
      except Exception as e2:
+         print(f"Second launch failed: {e2}")
+         print("Try restarting your runtime or checking library installations.")
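
A quick way to eyeball the new Plotly star layout without launching the Gradio app is to rebuild a tiny version of it and write the figure to an HTML file. This is a minimal standalone sketch, not part of the commit: the sample topic, concept names, and the preview filename are illustrative, and it only assumes plotly and numpy are installed.

import numpy as np
import plotly.graph_objects as go

# Tiny stand-in for the layout generate_mindmap builds: one center node plus a ring of concepts.
topic = "Smart Urban Gardens"
concepts = ["What It Does", "Who It's For", "Next Steps"]

pos = {topic: (0.0, 0.0)}
for i, c in enumerate(concepts):
    pos[c] = (np.cos(i * 2 * np.pi / len(concepts)), np.sin(i * 2 * np.pi / len(concepts)))

# Edges are drawn as one line trace with None separators between segments.
edge_x, edge_y = [], []
for c in concepts:
    edge_x += [pos[topic][0], pos[c][0], None]
    edge_y += [pos[topic][1], pos[c][1], None]

fig = go.Figure()
fig.add_trace(go.Scatter(x=edge_x, y=edge_y, mode="lines", line=dict(color="gray", width=1), hoverinfo="none"))
fig.add_trace(go.Scatter(
    x=[pos[n][0] for n in pos], y=[pos[n][1] for n in pos],
    mode="markers+text", text=list(pos), textposition="middle center",
))
fig.update_layout(showlegend=False, autosize=True, margin=dict(l=20, r=20, t=40, b=20))
fig.write_html("mindmap_preview.html")  # open in a browser to check label placement and hover behavior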