import pandas as pd
import os
import fsspec
import requests


def load_github_metadata(download_dir: str = "~/.objaverse") -> pd.DataFrame:
    """Loads the GitHub 3D object metadata as a Pandas DataFrame.

    Args:
        download_dir (str, optional): Directory into which the parquet metadata file
            is downloaded. Supports any file system supported by fsspec. Defaults to
            "~/.objaverse".

    Returns:
        pd.DataFrame: GitHub 3D object metadata as a Pandas DataFrame with columns for
            the object "githubUrl", "license", and "sha256".
    """
    filename = os.path.join(download_dir, "github", "github-urls.parquet")
    fs, path = fsspec.core.url_to_fs(filename)
    fs.makedirs(os.path.dirname(path), exist_ok=True)

    # download the parquet file if it doesn't exist
    if not fs.exists(path):
        url = "https://huggingface.co/datasets/allenai/objaverse-xl/resolve/main/github/github-urls.parquet"

        response = requests.get(url)
        response.raise_for_status()
        with fs.open(path, "wb") as file:
            file.write(response.content)

    # load the parquet file with fsspec
    with fs.open(path) as f:
        df = pd.read_parquet(f)

    return df
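

# ---------------------------------------------------------------------------
# Example usage: a minimal sketch, assuming this module is run directly and
# that the metadata columns match the docstring above ("githubUrl", "license",
# "sha256"). The first call downloads the parquet file; later calls reuse the
# cached copy under the download directory.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    # Download (if needed) and load the GitHub metadata table.
    annotations = load_github_metadata()  # defaults to "~/.objaverse"

    # Basic inspection: row count and the documented columns.
    print(f"Loaded {len(annotations)} GitHub object records")
    print(annotations[["githubUrl", "license", "sha256"]].head())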