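# Gradio app for the jane-street/2025-03-10 puzzle: downloads model.pt from the
# Hugging Face Hub and exposes a single text-in, number-out interface.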
import gradio as gr
import torch
from huggingface_hub import hf_hub_download

def load_model_from_hub(repo_id, filename):
    # Download the checkpoint from the Hugging Face Hub and load it on CPU.
    # weights_only=False lets torch.load unpickle the full module object,
    # which the app then calls directly.
    model_path = hf_hub_download(repo_id=repo_id, filename=filename)
    model = torch.load(model_path, weights_only=False, map_location='cpu')
    model.eval()
    return model

def predict(text, model):
    # Forward the raw text through the model with gradients disabled and
    # return the scalar output.
    with torch.no_grad():
        output = model(text)
        return float(output)

def create_gradio_app():
    # Load the puzzle model once at startup, then build the Blocks UI around it.
    repo_id = "jane-street/2025-03-10"
    model_filename = "model.pt"
    model = load_model_from_hub(repo_id, model_filename)
    
    with gr.Blocks() as demo:
        gr.Markdown('''        
        Today I went on a hike and found a pile of tensors hidden underneath a neolithic burial mound!
        
        I sent it over to the local neural plumber, and they managed to cobble together this.
        
        **[model.pt](https://huggingface.co/jane-street/2025-03-10/blob/main/model.pt)**
                    
        Anyway, I'm not sure what it does yet, but it must have been important to this past civilization. 
        Maybe start by looking at the last two layers. 
        ''')
        
        input_text = gr.Textbox(label="Model Input", value='vegetable dog') # two words?
        output = gr.Number(label="Model Output")
        
        input_text.submit(fn=lambda x: predict(x, model), inputs=input_text, outputs=output)
        
        gr.Markdown('''
        If you do figure it out, please let us know at *[email protected]*.
        ''')

    demo.queue(max_size=1_000)
    
    return demo

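# A possible starting point for the hint about the last two layers: assuming the
# downloaded object is a standard torch.nn.Module, one could dump the final two
# submodules and their parameter shapes. This is a sketch only; the helper name
# is illustrative and not part of the app above.
def inspect_last_layers(repo_id="jane-street/2025-03-10", filename="model.pt"):
    model = load_model_from_hub(repo_id, filename)
    print(model)  # overall architecture
    # named_children() yields the top-level submodules in registration order;
    # take the last two and list their parameter shapes.
    for name, layer in list(model.named_children())[-2:]:
        print(name, layer)
        for param_name, param in layer.named_parameters():
            print(f"  {param_name}: {tuple(param.shape)}")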
    
if __name__ == "__main__":
    app = create_gradio_app()
    app.launch(show_api=False)