# Textwizai / app.py
# Erfan11's picture
# Update app.py
# ae4dfbe verified
# raw
# history blame
# 695 Bytes
import os

import torch
from flask import Flask, request, jsonify
from transformers import AutoModel, AutoTokenizer
app = Flask(__name__)

# Hugging Face Hub repo id for both the tokenizer and the model.
MODEL_ID = "Erfan11/Neuracraft"

# SECURITY: never hard-code an access token in source. The literal token that
# was previously embedded here is leaked and should be revoked on the Hub.
# Supply it via the HF_TOKEN environment variable instead (None means
# anonymous access, which works for public repos).
HF_TOKEN = os.environ.get("HF_TOKEN")

# Load model and tokenizer from the Hugging Face Hub.
# `token=` replaces the deprecated `use_auth_token=` keyword.
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, token=HF_TOKEN)
model = AutoModel.from_pretrained(MODEL_ID, token=HF_TOKEN)
@app.route('/predict', methods=['POST'])
def predict():
    """Run the model on the POSTed text and return its hidden states.

    Expects a JSON body of the form ``{"text": "..."}``; responds with
    ``{"last_hidden_state": [...]}`` (nested lists), or a 400 error when
    the body is missing or malformed.
    """
    data = request.get_json(silent=True)  # silent=True: return None instead of raising on bad JSON
    if not data or "text" not in data:
        return jsonify({"error": "JSON body with a 'text' field is required"}), 400
    inputs = tokenizer(data["text"], return_tensors="pt")
    # Inference only — disable autograd bookkeeping.
    with torch.no_grad():
        outputs = model(**inputs)
    # BUG FIX: model outputs contain torch tensors, which jsonify cannot
    # serialize; convert the last hidden state to plain Python lists first.
    return jsonify({"last_hidden_state": outputs.last_hidden_state.tolist()})
if __name__ == '__main__':
    # SECURITY: Werkzeug's interactive debugger permits arbitrary code
    # execution, so never run with debug=True unconditionally. Enable it
    # only when explicitly requested via FLASK_DEBUG=1.
    app.run(debug=os.environ.get("FLASK_DEBUG") == "1")