import streamlit as st
import numpy as np
import torch
from torch import nn
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
# Set page config
st.set_page_config(page_title='Advanced NLP with Deep Learning', layout='wide')
# Title with styled emoji
st.markdown('<h1>🤖 Advanced NLP with Deep Learning 🚀</h1>', unsafe_allow_html=True)
# Section 1: Word Embeddings
st.markdown('<h2>📌 1. Word Embeddings</h2>', unsafe_allow_html=True)
st.subheader('🔎 Definition:')
st.write("""
Word embeddings are dense vector representations of words where similar words have similar representations. They are essential for text-based deep learning models.
- **🔹 Word2Vec (Skip-gram & CBOW)**: Learns word representations based on context.
- **🔹 GloVe (Global Vectors)**: Uses word co-occurrence statistics to learn embeddings.
- **🔹 FastText**: Handles subword information, helping with out-of-vocabulary words.
""")
# Word2Vec Example
st.subheader('🧩 Word2Vec Example:')
sentence = st.text_area("Enter a sentence to visualize Word2Vec embeddings", "NLP is amazing and very useful.")
if st.button('🎨 Visualize Word2Vec'):
    words = sentence.split()
    # Placeholder: random 50-dimensional vectors stand in for trained embeddings
    embeddings = {word: np.random.rand(1, 50) for word in words}
    st.write("**Word2Vec Embeddings (Random Example):**")
    for word, emb in embeddings.items():
        st.write(f"{word}: {emb.flatten()[:5]}...")
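# Hedged sketch: the vectors above are random placeholders. Real Word2Vec / FastText
# embeddings would typically be trained with gensim (assumed installed); shown here as
# a static listing so the app itself does not depend on gensim.
st.subheader('📚 Training Real Embeddings (Sketch):')
st.code("""
from gensim.models import Word2Vec, FastText  # gensim assumed available

sentences = [["nlp", "is", "amazing"], ["deep", "learning", "powers", "nlp"]]

# Skip-gram Word2Vec (sg=1): predicts surrounding context words from the target word
w2v = Word2Vec(sentences, vector_size=50, window=2, min_count=1, sg=1)
vector = w2v.wv["nlp"]                        # 50-dimensional embedding
neighbors = w2v.wv.most_similar("nlp", topn=3)

# FastText: builds vectors from character n-grams, so unseen words still get embeddings
ft = FastText(sentences, vector_size=50, window=2, min_count=1)
oov_vector = ft.wv["nlps"]                    # works even for out-of-vocabulary words
""", language="python")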
# Section 2: Sequence Models
st.markdown('<h2>📌 2. Sequence Models</h2>', unsafe_allow_html=True)
st.subheader('🔎 Definition:')
st.write("""
Sequence models process sequential data like sentences and play a key role in NLP tasks like translation, summarization, and sentiment analysis.
- **🔹 RNN (Recurrent Neural Networks)**: Maintains memory of previous words.
- **🔹 LSTM (Long Short-Term Memory)**: Handles long-range dependencies (a sketch follows the RNN example below).
- **🔹 GRU (Gated Recurrent Units)**: A simplified LSTM variant.
""")
# RNN Example
st.subheader('🛠️ RNN Example (PyTorch):')
if st.button('🖥️ Show RNN Model Architecture'):
    class SimpleRNN(nn.Module):
        def __init__(self, input_size, hidden_size, output_size):
            super(SimpleRNN, self).__init__()
            self.rnn = nn.RNN(input_size, hidden_size, batch_first=True)
            self.fc = nn.Linear(hidden_size, output_size)

        def forward(self, x):
            out, _ = self.rnn(x)            # out: (batch, seq_len, hidden_size)
            out = self.fc(out[:, -1, :])    # predict from the last time step
            return out

    rnn_model = SimpleRNN(input_size=10, hidden_size=20, output_size=1)
    st.write("**RNN Architecture:**")
    st.write(rnn_model)
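# Hedged sketch: the same classifier with nn.LSTM in place of nn.RNN, illustrating the
# LSTM variant mentioned above; swapping nn.LSTM for nn.GRU gives the GRU version.
st.subheader('🛠️ LSTM Example (PyTorch):')
if st.button('🖥️ Show LSTM Model Architecture'):
    class SimpleLSTM(nn.Module):
        def __init__(self, input_size, hidden_size, output_size):
            super(SimpleLSTM, self).__init__()
            self.lstm = nn.LSTM(input_size, hidden_size, batch_first=True)
            self.fc = nn.Linear(hidden_size, output_size)

        def forward(self, x):
            out, _ = self.lstm(x)           # out: (batch, seq_len, hidden_size)
            return self.fc(out[:, -1, :])   # predict from the last time step

    lstm_model = SimpleLSTM(input_size=10, hidden_size=20, output_size=1)
    st.write("**LSTM Architecture:**")
    st.write(lstm_model)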
# Section 3: Attention Mechanisms
st.markdown('<h2>📌 3. Attention Mechanisms</h2>', unsafe_allow_html=True)
st.subheader('🔎 Definition:')
st.write("""
Attention mechanisms allow models to focus on key parts of an input sequence, improving performance on tasks that require long-range dependencies.
- **🔹 Self-attention**: Assigns importance to different words.
- **🔹 Seq2Seq Models**: Encoder-decoder models used for translation.
- **🔹 Transformer**: Parallel processing for high efficiency.
""")
# Transformer Example
st.subheader('🛠️ Transformer Example (Simplified):')
if st.button('🖥️ Show Transformer Architecture'):
    # Self-attention over a sequence of 512-dimensional token vectors
    input_layer = keras.Input(shape=(None, 512))
    attention_output = layers.MultiHeadAttention(num_heads=8, key_dim=512)(input_layer, input_layer)
    pooled_output = layers.GlobalAveragePooling1D()(attention_output)
    dense1 = layers.Dense(256, activation="relu")(pooled_output)
    output_layer = layers.Dense(1)(dense1)
    transformer_model = keras.Model(inputs=input_layer, outputs=output_layer)
    st.write("**Transformer Architecture (Simplified):**")
    summary = []
    transformer_model.summary(print_fn=lambda line, *args, **kwargs: summary.append(line))
    st.text("\n".join(summary))
# Section 4: Key Attention Components
st.markdown('<h2>📌 4. Attention Components</h2>', unsafe_allow_html=True)
st.subheader('🔍 Self-attention:')
st.write("""
Each word in a sequence attends to all other words and assigns an importance weight, capturing long-range dependencies.
""")
st.subheader('🔄 Seq2Seq:')
st.write("""
Used for translation, where an encoder processes input and a decoder generates output.
""")
st.subheader('⚡ Transformer:')
st.write("""
Revolutionized NLP by using self-attention in both encoder and decoder while processing all tokens in parallel.
""")
st.markdown('✨ Thanks for Exploring NLP! ✨', unsafe_allow_html=True)