|
import torch |
|
import random |
|
from transformers import GPT2LMHeadModel, GPT2Tokenizer |
|
|
|
# Load the fine-tuned GPT-2 checkpoint and its matching tokenizer from
# the local directory.
model = GPT2LMHeadModel.from_pretrained("v2/abilities/small")
tokenizer = GPT2Tokenizer.from_pretrained("v2/abilities/small")

# Resize the embedding matrix in case the tokenizer gained extra
# (special) tokens during fine-tuning; a no-op if the vocab is unchanged.
model.resize_token_embeddings(len(tokenizer))

# Single source of truth for device placement.
device = torch.device("cpu")
model.to(device)  # fix: `device` was defined but the model was never moved to it
model.eval()      # inference mode (from_pretrained defaults to eval, but be explicit)
|
|
|
|
|
|
|
# Prompt used to seed generation (deliberately noisy fine-tune-style text,
# terminated with GPT-2's <|endoftext|> marker).
input_text = "It was an ordinary Thursday at interparkways where Ethan Cooper sat intensely focused on solving the complex cybercrime cases that surrounded him. As he rallied his team to identify and neutralize potential threats within miles of downtow elatri to each case. sa dicance suddenly clicked for evansas ithe coe itselfend aered teed gnation manipulaten behaviors consistent with heightened instinctual understanding. Suddenly: Reckless! flashed before his eyes raw intelligence cascaded no t just into actionable insights but also into boldness like watching sharp-shooting sequences in slow motion unfoldstrobbing situatio ns. With this convergence Ethereum became more than a cybersecurity expert; it transformed one elite intersectional hacker identity i nto something far beyond a mere analyst.? Ever since then Carl had become no longer merelyan cypher for justices sake ?he emergedint o existenceiriderously coiningthedigitalpuzzle unlike any other yetinelinformaticaurel.<|endoftext|>"

# Tokenize to a (1, seq_len) LongTensor and place it on the shared
# `device` variable (fix: was hard-coded to the string "cpu", bypassing
# the device object defined above).
input_ids = tokenizer.encode(input_text, return_tensors="pt").to(device)
|
|
|
# Generate a continuation of the prompt.
# NOTE(review): num_beams>1 combined with do_sample=True selects
# beam-multinomial sampling, which is stochastic run to run — confirm
# this mix is intentional rather than one flag left over from tuning.
# Removed output_hidden_states=True: without return_dict_in_generate=True,
# generate() returns a plain token tensor and the collected hidden states
# were silently discarded — pure memory/compute waste.
output = model.generate(
    input_ids,
    max_length=400,                        # hard cap on prompt + generated tokens
    num_return_sequences=1,
    eos_token_id=tokenizer.eos_token_id,   # stop at <|endoftext|>
    # GPT-2 has no pad token; pin padding to EOS explicitly so generate()
    # does not emit the "Setting pad_token_id" warning (same behavior it
    # would fall back to anyway).
    pad_token_id=tokenizer.eos_token_id,
    repetition_penalty=2.0,                # strongly discourage repeated tokens
    num_beams=5,
    do_sample=True,
    use_cache=True,                        # reuse past key/values for speed
)
|
|
|
print("\n", tokenizer.decode(output[0], skip_special_tokens=False), "\n\n", "Answer took", len(output[0]), "tokens\n") |
|
|