"""Text-generation smoke test for the fine-tuned GPT-2 checkpoint at v2/midjourney/small.

Loads the model and tokenizer, feeds one fixed story prompt, then prints the
decoded continuation together with the number of tokens in the output.
"""

import torch
import random  # NOTE(review): unused in this script — confirm before removing
from transformers import GPT2LMHeadModel, GPT2Tokenizer

# Load the fine-tuned checkpoint and its matching tokenizer.
model = GPT2LMHeadModel.from_pretrained("v2/midjourney/small")
tokenizer = GPT2Tokenizer.from_pretrained("v2/midjourney/small")
# Keep the embedding table in sync with the tokenizer vocabulary
# (needed when tokens were added during fine-tuning).
model.resize_token_embeddings(len(tokenizer))

# Fix: `device` was defined but never used (inputs were moved with a
# hard-coded "cpu" and the model was never moved at all). Route both
# through the one variable so switching to CUDA is a one-line change.
device = torch.device("cpu")
model.to(device)
model.eval()  # inference only — disable dropout (no-op after from_pretrained, but explicit)

# Test prompt; ends with the model's end-of-text marker. The text is
# intentionally left verbatim (including the garbled passages).
input_text = (
    "It was an ordinary Thursday at interparkways where Ethan Cooper sat "
    "intensely focused on solving the complex cybercrime cases that "
    "surrounded him. As he rallied his team to identify and neutralize "
    "potential threats within miles of downtow elatri to each case. "
    "sa dicance suddenly clicked for evansas ithe coe itselfend aered teed "
    "gnation manipulaten behaviors consistent with heightened instinctual "
    "understanding. Suddenly: Reckless! flashed before his eyes raw "
    "intelligence cascaded no t just into actionable insights but also into "
    "boldness like watching sharp-shooting sequences in slow motion "
    "unfoldstrobbing situatio ns. With this convergence Ethereum became more "
    "than a cybersecurity expert; it transformed one elite intersectional "
    "hacker identity i nto something far beyond a mere analyst.? \n"
    "Ever since then Carl had become no longer merelyan cypher for justices "
    "sake ?he emergedint o existenceiriderously coiningthedigitalpuzzle "
    "unlike any other yetinelinformaticaurel.<|endoftext|>"
)

input_ids = tokenizer.encode(input_text, return_tensors="pt").to(device)

with torch.no_grad():  # generation needs no gradient tracking
    output = model.generate(
        input_ids,
        max_length=400,
        num_return_sequences=1,
        eos_token_id=tokenizer.eos_token_id,
        # Fix: GPT-2 has no pad token; without this, generate() warns and
        # falls back to eos anyway — make the fallback explicit.
        pad_token_id=tokenizer.eos_token_id,
        repetition_penalty=2.0,
        #top_k=200,
        #top_p=0.9,
        num_beams=5,  # combined with do_sample=True this is beam-sample decoding
        #temperature=1.0, #0.7
        do_sample=True,
        use_cache=True,
        # NOTE(review): without return_dict_in_generate=True this flag has no
        # visible effect on the returned tensor; kept to preserve the call as-is.
        output_hidden_states=True,
    )

print(
    "\n",
    tokenizer.decode(output[0], skip_special_tokens=False),
    "\n\n",
    "Answer took",
    len(output[0]),
    "tokens\n",
)