Erfan11 commited on
Commit
4eaf7ce
1 Parent(s): 374fdff

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +14 -13
main.py CHANGED
@@ -1,29 +1,30 @@
1
  import os
2
- import tensorflow as tf
3
  from dotenv import load_dotenv
4
- from transformers import BertTokenizerFast
 
5
 
6
- # Load environment variables
7
- load_dotenv()
 
8
 
9
- def load_model(model_path):
10
- # Load the TensorFlow model using from_tf=True
11
- model = tf.keras.models.load_model(model_path)
12
  return model
13
 
14
- def load_tokenizer(model_path):
15
- tokenizer = BertTokenizerFast.from_pretrained(model_path)
16
  return tokenizer
17
 
18
  def predict(text, model, tokenizer):
19
  inputs = tokenizer(text, return_tensors="tf")
20
- outputs = model(inputs)
21
  return outputs
22
 
23
  def main():
24
- model_path = os.getenv('Erfan11/Neuracraft')
25
- model = load_model(model_path)
26
- tokenizer = load_tokenizer(model_path)
27
  # Example usage
28
  text = "Sample input text"
29
  result = predict(text, model, tokenizer)
 
1
  import os
 
2
  from dotenv import load_dotenv
3
+ from transformers import TFBertForSequenceClassification, BertTokenizerFast
4
+ import tensorflow as tf
5
 
6
# Model repo on the Hugging Face Hub.
MODEL_NAME = "Erfan11/Neuracraft"

# SECURITY: the previous revision hard-coded a real Hugging Face access token
# here. Never commit secrets — the leaked token is public in the git history
# and must be revoked on huggingface.co. Read it from the environment instead
# (a local .env file works via the already-imported python-dotenv).
# May be None for anonymous access to public repos.
API_KEY = os.getenv("HF_API_KEY")
9
 
10
def load_model(model_name):
    """Load the TensorFlow BERT sequence-classification model from the Hub.

    Args:
        model_name: repo id of the model on the Hugging Face Hub.

    Returns:
        A ``TFBertForSequenceClassification`` instance.

    Note: ``token=`` replaces the ``use_auth_token=`` argument, which is
    deprecated in recent transformers releases.
    """
    model = TFBertForSequenceClassification.from_pretrained(model_name, token=API_KEY)
    return model
14
 
15
def load_tokenizer(model_name):
    """Load the fast BERT tokenizer matching *model_name* from the Hub.

    Args:
        model_name: repo id of the model on the Hugging Face Hub.

    Returns:
        A ``BertTokenizerFast`` instance.

    Note: ``token=`` replaces the deprecated ``use_auth_token=`` argument,
    keeping this consistent with ``load_model``.
    """
    tokenizer = BertTokenizerFast.from_pretrained(model_name, token=API_KEY)
    return tokenizer
18
 
19
def predict(text, model, tokenizer):
    """Tokenize *text* as TensorFlow tensors and return the model's raw outputs."""
    encoded = tokenizer(text, return_tensors="tf")
    return model(**encoded)
23
 
24
def main():
    """Load the model and tokenizer, run one example prediction, and return it.

    Returns:
        The raw model outputs for the sample text. The previous version
        computed the prediction and silently discarded it; returning it is
        backward compatible (callers could only have seen ``None`` before).
    """
    model = load_model(MODEL_NAME)
    tokenizer = load_tokenizer(MODEL_NAME)
    # Example usage
    text = "Sample input text"
    result = predict(text, model, tokenizer)
    return result