Create app

app
ADDED
@@ -0,0 +1,379 @@
# VisualMind App - Fixed Version with Better Error Handling
# Install required libraries
#!pip install gradio>=3.50.2 transformers>=4.30.0 networkx>=3.0 matplotlib>=3.7.0 torch>=2.0.0

import gradio as gr
import torch
import networkx as nx
import matplotlib.pyplot as plt
from transformers import pipeline
import numpy as np
import io
from PIL import Image
import time
import traceback

# Check if GPU is available and print device info
device = "cuda" if torch.cuda.is_available() else "cpu"
print(f"Using device: {device}")

# Load simpler text generation model for faster processing
try:
    summarizer = pipeline("summarization", model="facebook/bart-large-cnn", device=0 if device=="cuda" else -1)
    text_generator = pipeline("text-generation", model="gpt2", device=0 if device=="cuda" else -1)
    print("✓ Models loaded successfully")
except Exception as e:
    print(f"Model loading error: {e}")
    # Fallback to smaller models
    try:
        summarizer = pipeline("summarization", model="sshleifer/distilbart-cnn-6-6", device=0 if device=="cuda" else -1)
        text_generator = pipeline("text-generation", model="distilgpt2", device=0 if device=="cuda" else -1)
        print("✓ Fallback models loaded successfully")
    except Exception as e:
        print(f"Critical error loading models: {e}. Please check your environment.")

# Add progress tracking
progress_status = {"value": 0, "desc": ""}

def update_progress(value, desc=""):
    """Update progress tracking"""
    progress_status["value"] = value
    progress_status["desc"] = desc
    print(f"Progress: {value}% - {desc}")

# Function to generate summary (with error handling)
def generate_summary(topic):
    """Generate a textual summary based on the input topic"""
    try:
        update_progress(10, "Starting summary generation")

        # Create a prompt about the topic
        prompt = f"""
        Provide a comprehensive analysis of the startup idea: {topic}

        Include the following sections:
        - Core concept and value proposition
        - Target market and potential customers
        - Technology and implementation requirements
        - Business model and revenue streams
        - Competitive advantages and market positioning
        - Potential challenges and mitigation strategies
        """

        # For startup ideas, generate custom content rather than summarizing.
        # Note: the prompt above is kept for reference but is not passed to a model in this
        # version; the branches below build the analysis directly, with better error handling.
        update_progress(30, "Processing topic information")

        if "AI" in topic or "artificial intelligence" in topic.lower():
            ai_component = "The artificial intelligence component provides significant competitive advantage through data analysis and automation."
        else:
            ai_component = "Adding AI capabilities could enhance this startup idea through data analysis and process automation."

        # Generate the summary with custom components for startup ideas
        if "urban farming" in topic.lower() or "agriculture" in topic.lower():
            summary = f"""
            {topic} represents an innovative approach to addressing food security and sustainability challenges.

            Core concept: This startup combines advanced technologies with urban agriculture to enable efficient food production in cities. The solution uses sensor networks and intelligent systems to monitor and optimize growing conditions in vertical farms, rooftop gardens, or indoor growing facilities.

            Target market: Primary customers include restaurants seeking farm-to-table produce, health-conscious urban consumers, grocery chains, and food service companies looking to reduce supply chain costs.

            Technology requirements: The implementation requires IoT sensor arrays, climate control systems, a central AI management platform, and mobile applications for monitoring. {ai_component}

            Business model: Revenue streams include B2B installations for commercial clients, subscription services for consumers, technology licensing, and potentially data monetization from agricultural insights.

            Competitive advantages: The solution offers fresher produce with lower transportation costs, reduced water usage (up to 95% compared to traditional farming), and year-round growing capabilities regardless of external climate conditions.

            Challenges include high initial capital costs, technical complexity, regulatory hurdles in some urban areas, and the need to demonstrate ROI to potential customers.
            """
        else:
            # Generic startup analysis if not urban farming/agriculture
            outputs = text_generator(
                f"A startup idea focused on {topic} would involve the following elements:",
                max_length=500,
                num_return_sequences=1,
                do_sample=True,  # sampling must be enabled for temperature to have an effect
                temperature=0.7
            )

            generated_text = outputs[0]['generated_text']
            # Clean up and structure the output
            summary = f"""
            {topic} represents an innovative startup opportunity with significant market potential.

            Core concept: This startup addresses [specific problem/need] by providing [solution approach] through innovative technology and business model design.

            {generated_text}

            {ai_component}

            Major challenges would include initial funding requirements, building the right team with domain expertise, and establishing market traction against existing competitors.
            """

        update_progress(60, "Summary generated")
        return summary

    except Exception as e:
        error_msg = f"Error generating summary: {str(e)}\n{traceback.format_exc()}"
        print(error_msg)
        return f"Unable to generate summary. Error: {str(e)}\n\nPlease try again with a different topic or check your connection."

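# Note (illustrative sketch, not part of the original flow): the `summarizer` pipeline loaded
# above is never called. If the generated analysis ever needed to be condensed, one way the
# summarization pipeline could be applied would be:
#
#     condensed = summarizer(long_text, max_length=130, min_length=30, do_sample=False)
#     short_text = condensed[0]["summary_text"]
#
# Keeping this commented out preserves the current behavior of returning the full analysis.
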
# Function to generate mind map
def generate_mindmap(topic, summary):
    """Generate a mind map based on the topic and summary"""
    try:
        update_progress(70, "Creating mind map")

        # Extract concepts from summary (simplified for reliability)
        concepts = [
            "Core Technology",
            "Market Opportunity",
            "Business Model",
            "Competitive Advantage",
            "Implementation Challenges"
        ]

        # For each concept, create subconcepts
        subconcepts = {
            "Core Technology": ["Key Components", "Technical Requirements", "Development Roadmap"],
            "Market Opportunity": ["Target Customers", "Market Size", "Growth Potential"],
            "Business Model": ["Revenue Streams", "Pricing Strategy", "Partnership Opportunities"],
            "Competitive Advantage": ["Unique Selling Points", "Barriers to Entry", "IP Protection"],
            "Implementation Challenges": ["Resource Requirements", "Regulatory Concerns", "Timeline"]
        }

        # Create a graph
        G = nx.Graph()

        # Add the central node
        G.add_node(topic, size=2000)

        # Add concept nodes and connect to central node
        for concept in concepts:
            G.add_node(concept, size=1000)
            G.add_edge(topic, concept, weight=2)

            # Add subconcept nodes and connect to concept nodes
            for subconcept in subconcepts[concept]:
                G.add_node(subconcept, size=500)
                G.add_edge(concept, subconcept, weight=1)

        update_progress(80, "Designing visualization")

        # Create a visually appealing mind map
        plt.figure(figsize=(14, 10))

        # Use a more stable layout algorithm
        pos = nx.spring_layout(G, k=0.5, seed=42)

        # Draw nodes with different sizes and colors
        node_sizes = [G.nodes[node].get('size', 300) for node in G.nodes()]

        # Color scheme: main topic, concepts, subconcepts
        node_colors = []
        for node in G.nodes():
            if node == topic:
                node_colors.append('#FF9999')  # Red for main topic
            elif node in concepts:
                node_colors.append('#99CCFF')  # Blue for main concepts
            else:
                node_colors.append('#CCFF99')  # Green for subconcepts

        # Draw the network elements
        nx.draw_networkx_nodes(G, pos, node_size=node_sizes, node_color=node_colors, alpha=0.8)
        nx.draw_networkx_edges(G, pos, width=1.5, alpha=0.5, edge_color='gray')

        # Custom node labels with different font sizes
        for node, (x, y) in pos.items():
            font_size = 15 if node == topic else 12 if node in concepts else 9
            plt.text(
                x, y, node,
                fontsize=font_size,
                ha='center', va='center',
                bbox=dict(boxstyle="round,pad=0.3", fc='white', ec='gray', alpha=0.8)
            )

        plt.title(f"Mind Map: {topic}", fontsize=16)
        plt.axis('off')

        # Save figure to bytes
        buf = io.BytesIO()
        plt.savefig(buf, format='png', dpi=150, bbox_inches='tight')
        buf.seek(0)

        # Convert to a PIL image for display in Gradio
        mindmap_img = Image.open(buf)
        plt.close()

        update_progress(100, "Mind map completed")
        return mindmap_img

    except Exception as e:
        error_msg = f"Error generating mind map: {str(e)}\n{traceback.format_exc()}"
        print(error_msg)

        # Create a simple error image
        plt.figure(figsize=(10, 6))
        plt.text(0.5, 0.5, f"Mind map generation error:\n{str(e)}",
                 ha='center', va='center', fontsize=12, color='red',
                 bbox=dict(facecolor='white', alpha=0.8))
        plt.axis('off')

        # Save error image
        buf = io.BytesIO()
        plt.savefig(buf, format='png', dpi=100)
        buf.seek(0)
        error_img = Image.open(buf)
        plt.close()

        return error_img

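# Usage sketch (assumption: run manually when testing outside the Gradio UI, not at import time):
#
#     preview = generate_mindmap("AI-Powered Urban Farming", "")
#     preview.save("mindmap_preview.png")  # the function returns a PIL Image
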
# Main processing function
def process_input(topic):
    """Process the main topic input"""
    try:
        if not topic or len(topic.strip()) < 3:
            return "Please enter a valid topic (at least 3 characters).", None, gr.update(visible=False)

        print(f"Processing topic: {topic}")
        update_progress(0, "Starting processing")

        # Generate summary
        summary = generate_summary(topic)

        # Generate mind map
        mindmap = generate_mindmap(topic, summary)

        return summary, mindmap, gr.update(visible=True)

    except Exception as e:
        error_msg = f"Error in main processing: {str(e)}\n{traceback.format_exc()}"
        print(error_msg)
        return f"An error occurred: {str(e)}", None, gr.update(visible=True)

# Function to process follow-up questions
def process_followup_question(question, current_summary):
    """Process a follow-up question"""
    try:
        if not question or len(question.strip()) < 5:
            return "Please enter a valid question (at least 5 characters).", None

        print(f"Processing follow-up question: {question}")

        # Generate answer using the text generator
        prompt = f"Question: {question}\n\nContext: {current_summary}\n\nAnswer:"

        response = text_generator(
            prompt,
            max_new_tokens=150,  # cap new tokens rather than total length, since the prompt contains the full summary
            num_return_sequences=1,
            do_sample=True,  # sampling must be enabled for temperature to have an effect
            temperature=0.7
        )

        # Extract and clean up the generated text
        answer_text = response[0]['generated_text']
        # Find where the answer starts
        answer_start = answer_text.find("Answer:") + 7 if "Answer:" in answer_text else 0
        answer = answer_text[answer_start:].strip()

        # Keep the current mind map displayed instead of clearing it with None
        return answer, gr.update()

    except Exception as e:
        error_msg = f"Error processing follow-up: {str(e)}\n{traceback.format_exc()}"
        print(error_msg)
        return f"An error occurred while processing your question: {str(e)}", None

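# Alternative sketch (assumption, not used by the app): an extractive question-answering
# pipeline tends to give more grounded answers than free-form GPT-2 generation for
# follow-ups about a fixed summary:
#
#     qa = pipeline("question-answering", model="distilbert-base-cased-distilled-squad")
#     answer = qa(question=question, context=current_summary)["answer"]
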
# Create the Gradio interface
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    current_summary = gr.State("")

    gr.Markdown("# 🧠 VisualMind Startup Idea Generator")
    gr.Markdown("Enter a startup idea topic to generate a comprehensive analysis and mind map.")

    with gr.Row():
        with gr.Column(scale=1):
            topic_input = gr.Textbox(
                label="Enter Startup Topic",
                placeholder="e.g., AI-Powered Urban Farming, Blockchain for Supply Chain, Virtual Reality Education"
            )
            submit_btn = gr.Button("Generate Analysis", variant="primary")

    # Add a visible progress bar
    with gr.Row():
        progress = gr.Textbox(label="Status", value="Ready to generate", interactive=False)

    with gr.Row(visible=False) as results_container:
        with gr.Column(scale=1):
            summary_output = gr.Textbox(label="Startup Analysis", lines=15)

            with gr.Accordion("Ask Follow-up Questions", open=False):
                followup_input = gr.Textbox(
                    label="Ask a question about this startup idea",
                    placeholder="e.g., What would be the initial funding requirements?"
                )
                followup_btn = gr.Button("Get Answer")
                followup_output = gr.Textbox(label="Answer", lines=5)

        with gr.Column(scale=1):
            mindmap_output = gr.Image(label="Mind Map", type="pil")

    # Set up event handlers
    def update_status():
        return f"{progress_status['value']}% - {progress_status['desc']}"

    submit_btn.click(
        fn=lambda: "Processing your startup topic...",
        inputs=None,
        outputs=progress
    ).then(
        process_input,
        inputs=[topic_input],
        outputs=[summary_output, mindmap_output, results_container]
    ).then(
        fn=lambda s: s,
        inputs=[summary_output],
        outputs=[current_summary]
    ).then(
        fn=lambda: "Ready for follow-up questions",
        inputs=None,
        outputs=progress
    )

    followup_btn.click(
        fn=lambda: "Processing your question...",
        inputs=None,
        outputs=progress
    ).then(
        process_followup_question,
        inputs=[followup_input, current_summary],
        outputs=[followup_output, mindmap_output]
    ).then(
        fn=lambda: "Follow-up processed",
        inputs=None,
        outputs=progress
    )
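
    # Note (assumption / sketch): update_status() above is defined but never wired to the UI.
    # One way to surface it would be a periodic refresh of the status box, e.g.:
    #
    #     demo.load(fn=update_status, inputs=None, outputs=progress, every=1)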

    gr.Markdown("### How It Works")
    gr.Markdown("""
    1. Enter a startup idea topic in the input field and click 'Generate Analysis'
    2. The AI will analyze your startup concept and generate a comprehensive breakdown
    3. A mind map will be created showing key aspects of the business idea
    4. Ask follow-up questions to explore specific aspects of the startup concept

    Note: This tool is for brainstorming and ideation purposes. Real startup validation requires market research and testing.
    """)

# Launch the app with better error handling
try:
    demo.launch(debug=True, share=True)
except Exception as e:
    print(f"Error launching Gradio app: {e}")
    print("Trying alternative launch method...")
    try:
        demo.launch(debug=True, server_name="0.0.0.0")
    except Exception as e2:
        print(f"Second launch attempt failed: {e2}")
        print("\nTROUBLESHOOTING TIPS:")
        print("1. Make sure you're running this in Google Colab")
        print("2. Try Runtime > Restart runtime and run again")
        print("3. Check that all libraries installed correctly")
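
# Deployment note (sketch, based on the install comment at the top of this file): when running
# as a Hugging Face Space rather than in Colab, these pins would typically live in a
# requirements.txt next to app.py, and demo.launch() needs no share=True:
#
#     gradio>=3.50.2
#     transformers>=4.30.0
#     networkx>=3.0
#     matplotlib>=3.7.0
#     torch>=2.0.0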