pbjs_gpt2 / app.py
import streamlit as st
from transformers import pipeline
@st.cache_resource
def load_model():
    # Cache the text-generation pipeline so the model is only loaded once per session
    return pipeline("text-generation", model="PeterBrendan/pbjs_gpt2")
def main():
    if "generated_widget_id" not in st.session_state:
        st.session_state["generated_widget_id"] = None

    st.title("Pbjs GPT2")
    st.write("**Model description:** This is a fine-tuned version of the GPT-2 model trained on a dataset of Prebid config files from 350+ publisher domains. The model is designed to provide insight into how other publishers configure their Prebid settings. Given a Prebid config setting, like ***bidderTimeout***, it can generate sample Prebid configuration settings based on the collected data. The model aims to help publishers get an idea of how different publishers configure their Prebid settings.")
    st.write("**Intended uses:** This model is intended to assist publishers in understanding and exploring how other publishers configure their Prebid settings. It can serve as a reference for common configurations, best practices, and the different approaches used by publishers across various domains.")
    st.write("Enter some text like **bidderTimeout** to get a generated Prebid config output. Using **{** will generate a Prebid config from the beginning.")
    st.write("*Note:* The model can take some time to run.")
    st.markdown("[Link to official Prebid Documentation on pbjs.setConfig](https://docs.prebid.org/dev-docs/publisher-api-reference/setConfig.html)")
    # Default prompts (the empty first option lets the custom prompt below take effect)
    default_prompts = ["", "{", "bidderTimeout", "bidderSequence", "useBidCache", "customPriceBucket"]

    # Create a selectbox for default prompts
    default_prompt = st.selectbox("Choose a default prompt:", default_prompts)

    # Create a text input field for a custom prompt
    custom_prompt = st.text_input("Enter a custom prompt:", "")

    # Use the selected default prompt if one is chosen, otherwise fall back to the custom prompt
    if default_prompt:
        user_input = default_prompt
    else:
        user_input = custom_prompt
    # Only generate once there is a non-empty prompt
    if user_input:
        # Load the Hugging Face model
        generator = load_model()

        # Generate text based on the user input
        generated_text = generator(user_input, max_length=700, num_return_sequences=1)[0]["generated_text"]

        # Display the generated text
        st.write("Generated Text:")
        st.write(generated_text)
# Run the app
if __name__ == "__main__":
    main()
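
# A minimal sketch of exercising the same model outside Streamlit (assumes the
# transformers package is installed; the model weights download on first use):
#
#     from transformers import pipeline
#
#     generator = pipeline("text-generation", model="PeterBrendan/pbjs_gpt2")
#     result = generator("bidderTimeout", max_length=700, num_return_sequences=1)
#     print(result[0]["generated_text"])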