cookey39 commited on
Commit
ebf1f42
1 Parent(s): 2b131ef

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +4 -2
README.md CHANGED
@@ -67,8 +67,10 @@ pipeline = AutoPipelineForText2Image.from_pretrained('stabilityai/stable-diffusi
67
  pipeline.load_lora_weights('cookey39/teratera', weight_name='pytorch_lora_weights.safetensors')
68
  embedding_path = hf_hub_download(repo_id='cookey39/teratera', filename='teratera_emb.safetensors', repo_type="model")
69
  state_dict = load_file(embedding_path)
70
- pipeline.load_textual_inversion(state_dict["clip_l"], token=[], text_encoder=pipeline.text_encoder, tokenizer=pipeline.tokenizer)
71
- pipeline.load_textual_inversion(state_dict["clip_g"], token=[], text_encoder=pipeline.text_encoder_2, tokenizer=pipeline.tokenizer_2)
 
 
72
 
73
  instance_token = "<s0><s1>"
74
  prompt = f"a {instance_token} full-length photo portrait,Vibrant, solo, 1girl, smile, long hair, hair between eyes, multicolored eyes, hooded jacket, open jacket, shirt, long sleeves, ribbon, best quality, perfect anatomy, highres, absurdres{instance_token} "
 
67
  pipeline.load_lora_weights('cookey39/teratera', weight_name='pytorch_lora_weights.safetensors')
68
  embedding_path = hf_hub_download(repo_id='cookey39/teratera', filename='teratera_emb.safetensors', repo_type="model")
69
  state_dict = load_file(embedding_path)
70
+ # load embeddings of text_encoder 1 (CLIP ViT-L/14)
71
+ pipeline.load_textual_inversion(state_dict["clip_l"], token=["<s0>", "<s1>"], text_encoder=pipeline.text_encoder, tokenizer=pipeline.tokenizer)
72
+ # load embeddings of text_encoder 2 (CLIP ViT-G/14)
73
+ pipeline.load_textual_inversion(state_dict["clip_g"], token=["<s0>", "<s1>"], text_encoder=pipeline.text_encoder_2, tokenizer=pipeline.tokenizer_2)
74
 
75
  instance_token = "<s0><s1>"
76
  prompt = f"a {instance_token} full-length photo portrait,Vibrant, solo, 1girl, smile, long hair, hair between eyes, multicolored eyes, hooded jacket, open jacket, shirt, long sleeves, ribbon, best quality, perfect anatomy, highres, absurdres{instance_token} "