import base64
import datetime
import gradio as gr
import numpy as np
import os
import pytz
import psutil
import re
import random
import torch
import time
import shutil
import zipfile
from PIL import Image
from io import BytesIO
from diffusers import DiffusionPipeline, LCMScheduler, AutoencoderTiny

# ... [previous imports and setup code remains unchanged]
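
# For reference, a minimal stand-in for the elided setup above (an assumption, not the
# original code): the model id, TAESD VAE, and CSS here are placeholders chosen only for
# illustration; the real app defines `pipe` and `css` in the elided section.
device = "cuda" if torch.cuda.is_available() else "cpu"
dtype = torch.float16 if device == "cuda" else torch.float32
pipe = DiffusionPipeline.from_pretrained("SimianLuo/LCM_Dreamshaper_v7", torch_dtype=dtype)
pipe.scheduler = LCMScheduler.from_config(pipe.scheduler.config)  # LCM sampling for few-step inference
pipe.vae = AutoencoderTiny.from_pretrained("madebyollin/taesd", torch_dtype=dtype)  # tiny VAE for faster decoding
pipe.to(device)
css = "#container { max-width: 700px; margin: 0 auto; }"  # placeholder styling for the container column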

# New function to save prompt to history
def save_prompt_to_history(prompt):
    # UTF-8 so prompts with non-ASCII characters round-trip cleanly
    with open("prompt_history.txt", "a", encoding="utf-8") as f:
        timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        f.write(f"{timestamp}: {prompt}\n")

# Modified predict function
def predict(prompt, guidance, steps, seed=1231231):
    generator = torch.manual_seed(seed)
    last_time = time.time()
    results = pipe(
        prompt=prompt,
        generator=generator,
        num_inference_steps=steps,
        guidance_scale=guidance,
        width=512,
        height=512,
        output_type="pil",
    )
    print(f"Pipe took {time.time() - last_time} seconds")
    
    # Save prompt to history
    save_prompt_to_history(prompt)
    
    # ... [rest of the function remains unchanged]

    return results.images[0] if len(results.images) > 0 else None

# Modified save_all_images function
def save_all_images(images):
    if len(images) == 0:
        return None, None
    
    timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    zip_filename = f"images_and_history_{timestamp}.zip"
    
    with zipfile.ZipFile(zip_filename, 'w') as zipf:
        # Add image files
        for file in images:
            zipf.write(file, os.path.basename(file))
        
        # Add prompt history file
        if os.path.exists("prompt_history.txt"):
            zipf.write("prompt_history.txt")
    
    # Generate download link
    zip_base64 = encode_file_to_base64(zip_filename)
    download_link = f'<a href="data:application/zip;base64,{zip_base64}" download="{zip_filename}">Download All (Images & History)</a>'
    
    return zip_filename, download_link
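
# encode_file_to_base64 is defined in the elided setup code; a minimal sketch of the
# assumed helper (read the file as bytes and return its base64 text for the data: URL):
def encode_file_to_base64(path):
    with open(path, "rb") as f:
        return base64.b64encode(f.read()).decode("utf-8")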

# Function to read prompt history
def read_prompt_history():
    if os.path.exists("prompt_history.txt"):
        with open("prompt_history.txt", "r", encoding="utf-8") as f:
            return f.read()
    return "No prompts yet."

# Modified Gradio interface
with gr.Blocks(css=css) as demo:
    with gr.Column(elem_id="container"):
        # ... [previous UI components remain unchanged]

        # Add prompt history display
        with gr.Accordion("Prompt History", open=False):
            # gr.Code has no "text" language option; language=None gives a plain-text view
            prompt_history = gr.Code(label="Prompt History", language=None, interactive=False)

        # ... [rest of the UI components]

    # Function to update prompt history display
    def update_prompt_history():
        return read_prompt_history()

    # Connect components. predict returns a single image, so it targets only the image
    # output; the history display is refreshed by the separate handlers below.
    generate_bt.click(fn=predict, inputs=inputs, outputs=image, show_progress=False)
    prompt.submit(fn=predict, inputs=inputs, outputs=image, show_progress=False)

    # Refresh the prompt history display after each generation
    generate_bt.click(fn=update_prompt_history, outputs=prompt_history)
    prompt.submit(fn=update_prompt_history, outputs=prompt_history)

    # Modified save_all_button click event: name the output components instead of
    # creating them anonymously inside the event wiring
    zip_file_output = gr.File()
    download_link_output = gr.HTML()
    save_all_button.click(
        fn=lambda: save_all_images([f for f in os.listdir() if f.lower().endswith((".png", ".jpg", ".jpeg"))]),
        outputs=[zip_file_output, download_link_output],
    )

demo.queue()
demo.launch(allowed_paths=["/"])  # "/" lets Gradio serve files from the whole filesystem; narrow this in production