import os

import nomic
import numpy as np
import pandas as pd
from nomic import atlas
from nomic.dataset import AtlasClass

from src.my_logger import setup_logger

# Authenticate with Nomic Atlas; NOMIC_KEY is read from the environment.
NOMIC_KEY = os.getenv('NOMIC_KEY')
nomic.login(NOMIC_KEY)

logger = setup_logger(__name__)


def count_words(text):
    """Return the number of whitespace-delimited words in `text`."""
    return len(text.split())


def build_nomic(dataset):
    """Build (or rebuild) the Nomic Atlas map from the 'train' split of `dataset`."""
    df = dataset['train'].to_pandas()

    # Metadata columns uploaded alongside the embeddings.
    non_embedding_columns = ['date_utc', 'title', 'flair', 'content', 'poster', 'permalink',
                             'id', 'word_count', 'score', 'score_percentile']

    # Bin scores into decile buckets labeled 0, 10, ..., 90. `set()` collapses
    # duplicate quantile edges, so a heavily skewed score distribution can yield
    # fewer than ten bins; labels then stay consecutive from 0 rather than
    # skipping the merged deciles.
    percentiles = df['score'].quantile([0, .1, .2, .3, .4, .5, .6, .7, .8, .9]).tolist()
    bins = sorted(set(percentiles + [df['score'].max()]))
    labels = [int(i * 10) for i in range(len(bins) - 1)]
    df['score_percentile'] = pd.cut(df['score'], bins=bins, labels=labels, include_lowest=True)

    df['word_count'] = df['content'].apply(count_words)

    # Delete any existing version of the Atlas map before re-uploading.
    # This uses private AtlasClass helpers; deletion fails harmlessly if the
    # map does not exist yet.
    logger.info("Trying to delete old version of nomic Atlas...")
    try:
        ac = AtlasClass()
        atlas_id = ac._get_dataset_by_slug_identifier("derek2/boru-subreddit-neural-search")['id']
        ac._delete_project_by_id(atlas_id)
        logger.info("Succeeded in deleting old version of nomic Atlas.")
    except Exception:
        logger.info("Failed to delete old version of nomic Atlas.")

    logger.info("Trying to create new version of Atlas...")
    project = atlas.map_data(
        embeddings=np.stack(df['embedding'].values),
        data=df[non_embedding_columns].to_dict(orient='records'),
        id_field='id',
        identifier='BORU Subreddit Neural Search',
    )
    logger.info("Succeeded in creating new version of nomic Atlas.")
    return project
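

# A minimal usage sketch (an assumption, not part of the original module):
# `build_nomic` expects a Hugging Face `DatasetDict` whose 'train' split carries
# an 'embedding' column plus the metadata columns listed above. The dataset id
# below is hypothetical.
if __name__ == '__main__':
    from datasets import load_dataset

    dataset = load_dataset('user/boru-subreddit')  # hypothetical dataset id
    build_nomic(dataset)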
|
|