import os

# Set a writable cache directory *before* importing transformers so the
# library reads the environment variable when it initializes; setting it
# afterwards has no effect and leads back to the permission issues.
os.environ["TRANSFORMERS_CACHE"] = "/app/cache"

from flask import Flask, request, jsonify
from transformers import pipeline

# Load the summarization model; from_tf is forwarded to from_pretrained via
# model_kwargs so the TensorFlow checkpoint is used when no PyTorch weights
# are available.
summarizer = pipeline("summarization", model="t5-base", model_kwargs={"from_tf": True})

app = Flask(__name__)

@app.route("/summarize", methods=["POST"])
def summarize():
    # Guard against missing or non-JSON bodies so data.get() never runs on None.
    data = request.get_json() or {}
    text = data.get("text", "")
    summary = summarizer(text, max_length=150, min_length=30, do_sample=False)
    return jsonify({"summary": summary[0]["summary_text"]})

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=7860)
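
# Example client call (a sketch, not part of the app: assumes the server above
# is running locally on port 7860 and that the `requests` package is installed):
#
#   import requests
#   resp = requests.post(
#       "http://localhost:7860/summarize",
#       json={"text": "Long article text to summarize..."},
#   )
#   print(resp.json()["summary"])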