import streamlit as st
import json
from bokeh.models import ColumnDataSource
from bokeh.plotting import figure

urls = [
    "https://huggingface.co/spaces/awacke1/CB-GR-Chatbot-Blenderbot",
    "https://huggingface.co/spaces/awacke1/TTS-STT-Blocks",
    "https://huggingface.co/spaces/awacke1/Prompt-Refinery-Text-to-Image-Generation",
    "https://huggingface.co/spaces/awacke1/Video-Summary",
    "https://huggingface.co/spaces/awacke1/AI-MovieMaker-Comedy",
    "https://huggingface.co/spaces/awacke1/ChatGPT-Memory-Chat-Story-Generator",
    "https://huggingface.co/spaces/awacke1/CloneAnyVoice",
    "https://huggingface.co/spaces/awacke1/ChatGPT-Streamlit-2",
    "https://huggingface.co/spaces/awacke1/WikipediaUltimateAISearch",
    "https://huggingface.co/spaces/awacke1/RLHF.Cognitive.Episodic.Semantic.Memory",
    "https://huggingface.co/spaces/awacke1/Memory-Shared",
    "https://huggingface.co/spaces/awacke1/VideoSwap",
    "https://huggingface.co/spaces/awacke1/AI-Wikipedia-Search",
    "https://huggingface.co/spaces/awacke1/AutoMLUsingStreamlit-Plotly",
    "https://huggingface.co/spaces/awacke1/NLP-Lyric-Chorus-Image",
    "https://huggingface.co/spaces/awacke1/OpenAssistant-Chatbot-FTW-Open-Source",
    "https://huggingface.co/spaces/awacke1/ChatGPTStreamlit7",
    "https://huggingface.co/spaces/awacke1/MultiPDF-QA-ChatGPT-Langchain",
    "https://huggingface.co/spaces/awacke1/SOTA-Plan",
    "https://huggingface.co/spaces/awacke1/AIandSmartTools",
    "https://huggingface.co/spaces/awacke1/3DVirtualFood",
    "https://huggingface.co/spaces/awacke1/Gradio-Gallery-Health-Medical-Icon-Sets",
    "https://huggingface.co/spaces/awacke1/DatasetAnalyzer",
    "https://huggingface.co/spaces/awacke1/PrompTart",
    "https://huggingface.co/spaces/awacke1/sileod-deberta-v3-base-tasksource-nli",
    "https://huggingface.co/spaces/awacke1/File-Memory-Operations-Human-Feedback-Gradio",
    "https://huggingface.co/spaces/awacke1/Bloom.Big.Science.Continual.Generator",
    "https://huggingface.co/spaces/awacke1/Ontology-Gradio",
    "https://huggingface.co/spaces/awacke1/HTML5-Aframe-3dMap-Flight",
    "https://huggingface.co/spaces/awacke1/Bloom.Generative.Writer",
    "https://huggingface.co/spaces/awacke1/Voice-ChatGPT-Streamlit-12",
    "https://huggingface.co/spaces/awacke1/HTML5-AR-VR",
    "https://huggingface.co/spaces/awacke1/AnimationAI",
    "https://huggingface.co/spaces/awacke1/GenerativeWordsandImages",
    "https://huggingface.co/spaces/awacke1/AR-VR-IOT-Demo",
    "https://huggingface.co/spaces/awacke1/ArtStyleFoodsandNutrition",
    "https://huggingface.co/spaces/awacke1/CarePlanQnAWithContext",
    "https://huggingface.co/spaces/awacke1/VideoSummaryYoutube3",
    "https://huggingface.co/spaces/awacke1/AW-01ST-CSV-Dataset-Analyzer",
    "https://huggingface.co/spaces/awacke1/Try.Playing.Learning.Sharing.On.This",
    "https://huggingface.co/spaces/awacke1/google-flan-t5-base",
    "https://huggingface.co/spaces/awacke1/PubMed-Parrot-Paraphraser-on-T5",
    "https://huggingface.co/spaces/awacke1/Writing-Grammar-And-Paraphrase-w-Pegasus",
    "https://huggingface.co/spaces/awacke1/runwayml-stable-diffusion-v1-5",
    "https://huggingface.co/spaces/awacke1/DockerGoFlanT5",
    "https://huggingface.co/spaces/awacke1/GradioContinualGenerator",
    "https://huggingface.co/spaces/awacke1/StreamlitSuperPowerCheatSheet"
]

# Extract the last part of each URL (after the last '/') to serve as the name of the button
url_names = [url.split('/')[-1] for url in urls]
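# e.g. "https://huggingface.co/spaces/awacke1/TTS-STT-Blocks" -> "TTS-STT-Blocks"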

# Associate each URL with a relevant emoji based on keywords in its name
emoji_mapping = {
    "Chatbot": "πŸ€–",
    "TTS": "πŸ—£οΈ",
    "STT": "πŸ‘‚",
    "Video": "πŸŽ₯",
    "MovieMaker": "🍿",
    "ChatGPT": "πŸ’¬",
    "Voice": "πŸŽ™οΈ",
    "Wikipedia": "πŸ“–",
    "Memory": "🧠",
    "AI": "🧠",
    "OpenAssistant": "🀝",
    "3D": "πŸ•ΆοΈ",
    "AR": "πŸ‘“",
    "VR": "πŸ•ΆοΈ",
    "Animation": "πŸ–ŒοΈ",
    "Dataset": "πŸ“Š",
    "Gradio": "πŸ“»",
    "HTML5": "🌐",
    "Writing": "✍️",
    "Grammar": "πŸ–‹οΈ",
    "Paraphrase": "πŸ”„",
    "Streamlit": "🌠"
}
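
# Example: "CB-GR-Chatbot-Blenderbot" contains the keyword "Chatbot", so it is
# assigned πŸ€–; a name that matches no keyword falls back to the default πŸ”—.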

# Map each URL name to its most relevant emoji
url_emojis = []
for name in url_names:
    associated_emoji = "πŸ”—"  # Default emoji
    for keyword, emoji in emoji_mapping.items():
        if keyword in name:
            associated_emoji = emoji
            break
    url_emojis.append(associated_emoji)


def get_emoji(name):
    for key, emoji in emoji_mapping.items():
        if key in name:
            return emoji
    return "πŸ”—"

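# The two helpers below persist votes as a flat {url: count} JSON mapping in
# votes.json; on first run the file does not exist yet, so every Space URL is
# seeded with a count of zero.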
def load_votes():
    try:
        with open("votes.json", "r") as f:
            return json.load(f)
    except FileNotFoundError:
        return {url: 0 for url in urls}

def save_votes(votes):
    with open("votes.json", "w") as f:
        json.dump(votes, f)

def main():
    votes = load_votes()
    
    # Create sorted list of items
    items = [{"url": url, "name": url.split('/')[-1],
              "emoji": get_emoji(url.split('/')[-1]),
              "votes": votes.get(url, 0)} for url in urls]
    items.sort(key=lambda x: (-x["votes"], x["name"]))
    
    # Display buttons in grid
    cols = st.columns(4)
    for i, item in enumerate(items):
        with cols[i % 4]:
            if st.button(f"{item['emoji']} {item['name']}", key=item['url']):
                votes[item['url']] = votes.get(item['url'], 0) + 1
                save_votes(votes)
                st.rerun()
            st.write(f"Votes: {item['votes']}")
    
    # Display vote graph
    if any(votes.values()):
        source = ColumnDataSource({
            'names': [i["name"] for i in items if votes[i["url"]] > 0],
            'votes': [i["votes"] for i in items if votes[i["url"]] > 0]
        })
        p = figure(x_range=source.data['names'], height=350, title="Vote Counts")
        p.vbar(x='names', top='votes', width=0.9, source=source)
        p.xaxis.major_label_orientation = 1.2
        st.bokeh_chart(p)

if __name__ == "__main__":
    main()
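
# Usage sketch (assuming this file is saved as app.py in an environment with
# streamlit and bokeh installed):
#   streamlit run app.py
# Note: st.bokeh_chart may require a specific bokeh version (2.4.3) on recent
# Streamlit releases.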