# Flask inference service combining a Hugging Face transformer with a Keras model.
# (Removed non-code residue from the Hugging Face Spaces page scrape.)
import os

from transformers import AutoModel, AutoTokenizer
from flask import Flask, request, jsonify
import tensorflow as tf

app = Flask(__name__)

# SECURITY: the original file hardcoded a Hugging Face access token
# ("hf_XVcj...") in source. That token is now public and MUST be revoked.
# Read the replacement token from the environment instead.
HF_TOKEN = os.environ.get("HF_TOKEN")

# Load Hugging Face model and tokenizer.
# `token=` replaces the deprecated `use_auth_token=` keyword.
tokenizer = AutoTokenizer.from_pretrained("Erfan11/Neuracraft", token=HF_TOKEN)
hf_model = AutoModel.from_pretrained("Erfan11/Neuracraft", token=HF_TOKEN)

# Load TensorFlow model (path is a placeholder — point at the real .h5 file).
tf_model = tf.keras.models.load_model('path_to_your_tf_model.h5')
@app.route("/predict", methods=["POST"])  # BUG FIX: route was never registered, endpoint was unreachable
def predict():
    """Run inference on POSTed JSON and return both models' outputs.

    Expects a JSON body of the form {"text": "..."}.
    Returns a JSON object with:
      - "hf_outputs": the Hugging Face model's first output tensor
        (presumably the last hidden state — confirm against the model class)
        as a nested list.
      - "tf_outputs": the TensorFlow model's prediction as a nested list.
        NOTE(review): feeding raw text straight into `tf_model.predict` only
        works if that model embeds its own preprocessing — verify.

    Responds with HTTP 400 if the body is missing or has no "text" key
    (previously this raised an unhandled KeyError -> HTTP 500).
    """
    data = request.get_json(silent=True)
    if not data or "text" not in data:
        return jsonify({"error": "JSON body with a 'text' field is required"}), 400

    # Tokenize the input using Hugging Face's tokenizer.
    inputs = tokenizer(data["text"], return_tensors="pt")

    # Forward pass through the Hugging Face model.
    hf_outputs = hf_model(**inputs)

    # TensorFlow model prediction (modify based on your input processing).
    tf_outputs = tf_model.predict([data["text"]])

    return jsonify({
        # .detach() guards against tensors that require grad; .tolist() makes
        # the outputs JSON-serializable.
        "hf_outputs": hf_outputs[0].detach().tolist(),
        "tf_outputs": tf_outputs.tolist(),
    })
if __name__ == '__main__':
    # SECURITY NOTE(review): debug=True enables the Werkzeug interactive
    # debugger, which allows arbitrary code execution if the port is ever
    # reachable from outside — disable before any non-local deployment.
    app.run(debug=True)