BeveledCube committed on
Commit 5517f9c
1 parent: e7b7706

Upload 2 files

Files changed (2)
  1. main.py +31 -0
  2. requirements.txt +3 -0
main.py ADDED
@@ -0,0 +1,31 @@
+ from flask import Flask, request, jsonify
+ from transformers import GPT2LMHeadModel, GPT2Tokenizer
+ import torch
+
+ app = Flask("Response API")
+
+ # Load the Hugging Face GPT-2 model and tokenizer
+ model = GPT2LMHeadModel.from_pretrained("microsoft/DialoGPT-medium")
+ tokenizer = GPT2Tokenizer.from_pretrained("microsoft/DialoGPT-medium")
+
+ @app.route("/", methods=["POST"])
+ def receive_data():
+     data = request.get_json()
+
+     print("Prompt:", data['prompt'])
+     print("Length:", data['length'])
+
+     input_text = data['prompt']
+
+     # Tokenize the input text
+     input_ids = tokenizer.encode(input_text, return_tensors="pt")
+
+     # Generate output using the model
+     output_ids = model.generate(input_ids, max_length=data['length'], num_beams=5, no_repeat_ngram_size=2)
+     generated_text = tokenizer.decode(output_ids[0], skip_special_tokens=True)
+
+     answer_data = { "answer": generated_text }
+     print("Answered with:", answer_data)
+     return jsonify(answer_data)
+
+ app.run(debug=False, port=7860)
requirements.txt ADDED
@@ -0,0 +1,3 @@
+ transformers
+ torch
+ flask
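
For reference, a minimal client sketch (not part of this commit) that exercises the POST endpoint defined in main.py, assuming the server is running locally on port 7860; the prompt and length values here are placeholders, and the requests package is an extra dependency not listed in requirements.txt:

    import requests  # assumed to be installed separately; not in requirements.txt

    # Send a prompt and target length to the Flask endpoint and print the generated answer
    payload = {"prompt": "Hello, how are you?", "length": 50}
    response = requests.post("http://127.0.0.1:7860/", json=payload)
    print(response.json()["answer"])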