"""Streamlit app: load a 4-bit quantized Llama-2 causal LM for code generation."""
import os
import time

import bitsandbytes as bnb
import numpy as np
import pandas as pd
import streamlit as st
import torch
from huggingface_hub import login
from peft import PeftConfig, PeftModel
from transformers import (
    AutoModelForCausalLM,
    AutoTokenizer,
    BitsAndBytesConfig,
    Trainer,
    TrainingArguments,
    pipeline,
)

# st.set_page_config must be the first Streamlit command executed.
st.set_page_config(
    page_title="Code Generation",
    page_icon="🤖",
    layout="wide",
    initial_sidebar_state="expanded",
)

# SECURITY: the original source committed a HuggingFace token in plain text.
# That token is compromised and must be revoked at huggingface.co/settings/tokens.
# Read the token from the environment (or Streamlit secrets) instead.
hf_token = os.environ.get("HF_TOKEN") or st.secrets.get("HF_TOKEN", None)
if hf_token:
    login(token=hf_token, add_to_git_credential=True)
else:
    # Best-effort: public checkpoints still load without authentication.
    st.warning("HF_TOKEN is not set; gated/private models may fail to load.")

# Checkpoint actually loaded below. (The page previously advertised
# 'TinyPixel/Llama-2-7B-bf16-sharded' while loading a different model;
# display the real one to avoid misleading users.)
model_name = 'red1xe/Llama-2-7B-codeGPT'

st.title("Code Generation")
st.write(f'MODEL: {model_name}')

# 4-bit NF4 quantization with nested (double) quantization to cut VRAM,
# computing in bfloat16 for numerical stability.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_use_double_quant=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.bfloat16,
)

tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, quantization_config=bnb_config)