from flask import Flask, request, jsonify
from transformers import AutoModel, AutoTokenizer

app = Flask(__name__)

# Load the model and tokenizer from the Hugging Face Hub.
# In practice the access token is usually read from an environment variable
# rather than hardcoded in the source.
HF_TOKEN = "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"
tokenizer = AutoTokenizer.from_pretrained("Erfan11/Neuracraft", use_auth_token=HF_TOKEN)
model = AutoModel.from_pretrained("Erfan11/Neuracraft", use_auth_token=HF_TOKEN)

@app.route("/predict", methods=["POST"])
def predict():
    data = request.get_json()
    inputs = tokenizer(data["text"], return_tensors="pt")
    outputs = model(**inputs)
    # Model outputs contain tensors, which are not JSON-serializable;
    # convert the final hidden states to plain lists before returning them.
    return jsonify({"last_hidden_state": outputs.last_hidden_state.tolist()})

if __name__ == "__main__":
    app.run(debug=True)
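
Once the server is running, the /predict route defined above can be exercised with a small client. Below is a minimal sketch using the requests library; it assumes the app is served on Flask's default local port (5000) and the example input text is purely illustrative.

import requests

# Send a text payload to the local /predict endpoint and print the JSON response.
response = requests.post(
    "http://127.0.0.1:5000/predict",
    json={"text": "Hello from Neuracraft!"},
)
print(response.json())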