Commit d98e404 (1 parent: ef2edf8) by mattdeitke: add initial download api
Files changed (1): objaverse_xl/github.py (+1003, -0)
objaverse_xl/github.py
@@ -1,7 +1,91 @@
+"""
+
+- handle_found_object: Called when an object is successfully found and downloaded. Here,
+    the object has the same sha256 as the one that was downloaded with Objaverse-XL. If
+    not specified, the object will be downloaded, but nothing will be done with it.
+
+    Args:
+        file (str): Local path to the downloaded 3D object.
+        github_url (str): GitHub URL of the 3D object.
+        sha256 (str): SHA256 of the contents of the 3D object.
+        repo (str): Name of the GitHub repo where the 3D object comes from.
+        organization (str): Name of the GitHub organization where the 3D object comes from.
+    Returns: Any
+
+- handle_new_object: Called when a new object is found. Here, the object is not used in
+    Objaverse-XL, but is still downloaded with the repository. The object may not have
+    been used because it does not successfully import into Blender. If not specified,
+    the object will be downloaded, but nothing will be done with it.
+
+    Args:
+        file (str): Local path to the downloaded 3D object.
+        github_url (str): GitHub URL of the 3D object.
+        sha256 (str): SHA256 of the contents of the 3D object.
+        repo (str): Name of the GitHub repo where the 3D object comes from.
+        organization (str): Name of the GitHub organization where the 3D object comes from.
+    Returns: Any
+
+- handle_modified_object: Called when a modified object is found and downloaded. Here,
+    the object is successfully downloaded, but it has a different sha256 than the one
+    that was downloaded with Objaverse-XL. This is not expected to happen very often,
+    because the same commit hash is used for each repo. If not specified, the object
+    will be downloaded, but nothing will be done with it.
+
+    Args:
+        file (str): Local path to the downloaded 3D object.
+        github_url (str): GitHub URL of the 3D object.
+        sha256 (str): SHA256 of the contents of the 3D object.
+        repo (str): Name of the GitHub repo where the 3D object comes from.
+        organization (str): Name of the GitHub organization where the 3D object comes from.
+    Returns: Any
+
+- handle_missing_object: Called when an object that is in Objaverse-XL is not found.
+    Here, it is likely that the repository was deleted or renamed. If not specified,
+    nothing will be done with the missing object.
+
+    Args:
+        github_url (str): GitHub URL of the 3D object.
+        sha256 (str): SHA256 of the contents of the original 3D object.
+        repo (str): Name of the GitHub repo where the 3D object comes from.
+        organization (str): Name of the GitHub organization where the 3D object comes from.
+    Returns: Any
+
+
+Note: You'll likely find more objects by
+
+"""
+
+# %%
+import shutil
+import json
 import pandas as pd
+from typing import Dict, List, Optional, Literal, Callable
+from tqdm import tqdm
 import os
+import tarfile
 import fsspec
 import requests
+import tempfile
+import subprocess
+from loguru import logger
+import hashlib
+from multiprocessing import Pool
+import multiprocessing
+
+FILE_EXTENSIONS = [
+    ".obj",
+    ".glb",
+    ".gltf",
+    ".usdz",
+    ".usd",
+    ".fbx",
+    ".stl",
+    ".usda",
+    ".dae",
+    ".ply",
+    ".abc",
+    ".blend",
+]


 def load_github_metadata(download_dir: str = "~/.objaverse") -> pd.DataFrame:
@@ -33,3 +117,922 @@ def load_github_metadata(download_dir: str = "~/.objaverse") -> pd.DataFrame:
         df = pd.read_parquet(f)

     return df
+
+
+def _get_repo_id_with_hash(item: pd.Series) -> str:
+    org, repo = item["githubUrl"].split("/")[3:5]
+    commit_hash = item["githubUrl"].split("/")[6]
+    return f"{org}/{repo}/{commit_hash}"
+
+
+def _git_shallow_clone(repo_url: str, target_directory: str) -> bool:
+    return _run_command_with_check(
+        ["git", "clone", "--depth", "1", repo_url, target_directory],
+    )
+
+
+def _run_command_with_check(command: List[str], cwd: Optional[str] = None) -> bool:
+    try:
+        subprocess.run(
+            command,
+            cwd=cwd,
+            check=True,
+            stdout=subprocess.DEVNULL,
+            stderr=subprocess.DEVNULL,
+        )
+        return True
+    except subprocess.CalledProcessError as e:
+        logger.error("Error: {}", e)
+        logger.error(e.stdout)
+        logger.error(e.stderr)
+        return False
+
+
+def get_file_hash(file_path: str) -> str:
+    # Check if the path is a symbolic link
+    if os.path.islink(file_path):
+        # Resolve the symbolic link
+        resolved_path = os.readlink(file_path)
+        # Check if the resolved path exists
+        if not os.path.exists(resolved_path):
+            raise FileNotFoundError(
+                f"The symbolic link points to a file that doesn't exist: {resolved_path}"
+            )
+    sha256 = hashlib.sha256()
+    # Read the file from the path
+    with open(file_path, "rb") as f:
+        # Loop till the end of the file
+        for byte_block in iter(lambda: f.read(4096), b""):
+            sha256.update(byte_block)
+    return sha256.hexdigest()
+
+
+def _process_repo(
+    repo_id: str,
+    fs: fsspec.AbstractFileSystem,
+    base_dir: str,
+    save_repo_format: Optional[Literal["zip", "tar", "tar.gz", "files"]],
+    expected_objects: Dict[str, str],
+    handle_found_object: Optional[Callable],
+    handle_modified_object: Optional[Callable],
+    handle_missing_object: Optional[Callable],
+    handle_new_object: Optional[Callable],
+    commit_hash: Optional[str],
+) -> List[Dict[str, str]]:
+    """
+
+    Args:
+        expected_objects (Dict[str, str]): Dictionary of objects that one expects to
+            find in the repo. Keys are the GitHub URLs and values are the sha256 of the
+            objects.
+    """
+    # NOTE: assuming that the user has already checked that the repo doesn't exist,
+    org, repo = repo_id.split("/")
+
+    with tempfile.TemporaryDirectory() as temp_dir:
+        # clone the repo to a temp directory
+        target_directory = os.path.join(temp_dir, repo)
+        successful_clone = _git_shallow_clone(
+            f"https://github.com/{org}/{repo}.git", target_directory
+        )
+        if not successful_clone:
+            logger.error(f"Could not clone {repo_id}")
+            if handle_missing_object is not None:
+                for github_url, sha256 in expected_objects.items():
+                    handle_missing_object(
+                        github_url=github_url,
+                        sha256=sha256,
+                        repo=repo,
+                        organization=org,
+                    )
+            return []
+
+        # use the commit hash if specified
+        repo_commit_hash = _get_commit_hash_from_local_git_dir(target_directory)
+        if commit_hash is not None:
+            keep_going = True
+            if repo_commit_hash != commit_hash:
+                # run git reset --hard && git checkout 37f4d8d287e201ce52c048bf74d46d6a09d26b2c
+                if not _run_command_with_check(
+                    ["git", "fetch", "origin", commit_hash], target_directory
+                ):
+                    logger.error(
+                        f"Error in git fetch! Sticking with {repo_commit_hash=} instead of {commit_hash=}"
+                    )
+                    keep_going = False
+
+                if keep_going and not _run_command_with_check(
+                    ["git", "reset", "--hard"], target_directory
+                ):
+                    logger.error(
+                        f"Error in git reset! Sticking with {repo_commit_hash=} instead of {commit_hash=}"
+                    )
+                    keep_going = False
+
+                if keep_going:
+                    if _run_command_with_check(
+                        ["git", "checkout", commit_hash], target_directory
+                    ):
+                        repo_commit_hash = commit_hash
+                    else:
+                        logger.error(
+                            f"Error in git checkout! Sticking with {repo_commit_hash=} instead of {commit_hash=}"
+                        )
+
+        # pull the lfs files
+        _pull_lfs_files(target_directory)
+
+        # get all the files in the repo
+        files = _list_files(target_directory)
+        files_with_3d_extension = [
+            file
+            for file in files
+            if any(file.lower().endswith(ext) for ext in FILE_EXTENSIONS)
+        ]
+
+        # get the sha256 for each file
+        file_hashes = []
+        for file in files_with_3d_extension:
+            file_hash = get_file_hash(file)
+            # remove the temp_dir from the file path
+            github_url = file.replace(
+                target_directory,
+                f"https://github.com/{org}/{repo}/blob/{repo_commit_hash}",
+            )
+            file_hashes.append(dict(sha256=file_hash, githubUrl=github_url))
+
+            # handle the object under different conditions
+            if github_url in expected_objects:
+                if expected_objects[github_url] == file_hash:
+                    if handle_found_object is not None:
+                        handle_found_object(
+                            file=file,
+                            github_url=github_url,
+                            sha256=file_hash,
+                            repo=repo,
+                            organization=org,
+                        )
+                else:
+                    if handle_modified_object is not None:
+                        handle_modified_object(
+                            file=file,
+                            github_url=github_url,
+                            sha256=file_hash,
+                            repo=repo,
+                            organization=org,
+                        )
+            elif handle_new_object is not None:
+                handle_new_object(
+                    file=file,
+                    github_url=github_url,
+                    sha256=file_hash,
+                    repo=repo,
+                    organization=org,
+                )
+
+        # save the file hashes to a json file
+        with open(
+            os.path.join(target_directory, ".objaverse-file-hashes.json"), "w"
+        ) as f:
+            json.dump(file_hashes, f, indent=2)
+
+        # remove the .git directory
+        shutil.rmtree(os.path.join(target_directory, ".git"))
+
+        if save_repo_format is not None:
+            logger.debug(f"Saving as {save_repo_format}")
+            # save the repo to a zip file
+            if save_repo_format == "zip":
+                shutil.make_archive(target_directory, "zip", target_directory)
+            elif save_repo_format == "tar":
+                with tarfile.open(os.path.join(temp_dir, f"{repo}.tar"), "w") as tar:
+                    tar.add(target_directory, arcname=repo)
+            elif save_repo_format == "tar.gz":
+                with tarfile.open(
+                    os.path.join(temp_dir, f"{repo}.tar.gz"), "w:gz"
+                ) as tar:
+                    tar.add(target_directory, arcname=repo)
+            elif save_repo_format == "files":
+                pass
+            else:
+                raise ValueError(
+                    f"save_repo_format must be one of zip, tar, tar.gz, files. Got {save_repo_format}"
+                )
+
+            dirname = os.path.join(base_dir, "repos", org)
+            fs.makedirs(dirname, exist_ok=True)
+            if save_repo_format != "files":
+                # move the repo to the correct location (with put)
+                fs.put(
+                    os.path.join(temp_dir, f"{repo}.{save_repo_format}"),
+                    os.path.join(dirname, f"{repo}.{save_repo_format}"),
+                )
+            else:
+                # move the repo to the correct location (with put)
+                fs.put(target_directory, dirname, recursive=True)
+
+        # get each object that was missing from the expected objects
+        if handle_missing_object is not None:
+            obtained_urls = {x["githubUrl"] for x in file_hashes}
+            for github_url, sha256 in expected_objects.items():
+                if github_url not in obtained_urls:
+                    handle_missing_object(
+                        github_url=github_url,
+                        sha256=sha256,
+                        repo=repo,
+                        organization=org,
+                    )
+
+        return file_hashes
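The expected_objects argument documented above maps each GitHub blob URL to the sha256 recorded in Objaverse-XL. A minimal sketch of its shape, with the URL and hash taken from the tests below:

    expected_objects = {
        "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example.glb":
            "04e6377317d6818e32c5cbd1951e76deb3641bbf4f6db6933046221d5fbf1c5c",
    }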
+
+
+def _list_files(root_dir: str) -> List[str]:
+    return [
+        os.path.join(root, f) for root, dirs, files in os.walk(root_dir) for f in files
+    ]
+
+
+def _pull_lfs_files(repo_dir: str) -> None:
+    if _has_lfs_files(repo_dir):
+        subprocess.run(["git", "lfs", "pull"], cwd=repo_dir)
+
+
+def _has_lfs_files(repo_dir: str) -> bool:
+    gitattributes_path = os.path.join(repo_dir, ".gitattributes")
+    if not os.path.exists(gitattributes_path):
+        return False
+    with open(gitattributes_path, "r") as f:
+        for line in f:
+            if "filter=lfs" in line:
+                return True
+    return False
+
+
+def _get_commit_hash_from_local_git_dir(local_git_dir: str) -> str:
+    # get the git hash of the repo
+    result = subprocess.run(
+        ["git", "rev-parse", "HEAD"], cwd=local_git_dir, capture_output=True
+    )
+    commit_hash = result.stdout.strip().decode("utf-8")
+    return commit_hash
+
+
+def _parallel_process_repo(args) -> List[Dict[str, str]]:
+    (
+        repo_id_hash,
+        fs,
+        base_dir,
+        save_repo_format,
+        expected_objects,
+        handle_found_object,
+        handle_modified_object,
+        handle_missing_object,
+        handle_new_object,
+    ) = args
+    repo_id = "/".join(repo_id_hash.split("/")[:2])
+    commit_hash = repo_id_hash.split("/")[2]
+    return _process_repo(
+        repo_id=repo_id,
+        fs=fs,
+        base_dir=base_dir,
+        save_repo_format=save_repo_format,
+        expected_objects=expected_objects,
+        handle_found_object=handle_found_object,
+        handle_modified_object=handle_modified_object,
+        handle_missing_object=handle_missing_object,
+        handle_new_object=handle_new_object,
+        commit_hash=commit_hash,
+    )
+
+
+def _process_group(group):
+    key, group_df = group
+    return key, group_df.set_index("githubUrl")["sha256"].to_dict()
+
+
+# %%
+
+
+def download_github_objects(
+    objects: pd.DataFrame,
+    processes: Optional[int] = None,
+    download_dir: str = "~/.objaverse",
+    save_repo_format: Optional[Literal["zip", "tar", "tar.gz", "files"]] = None,
+    handle_found_object: Optional[Callable] = None,
+    handle_modified_object: Optional[Callable] = None,
+    handle_missing_object: Optional[Callable] = None,
+    handle_new_object: Optional[Callable] = None,
+) -> List[Dict[str, str]]:
+    """Download the specified GitHub objects.
+
+    Args:
+        objects (pd.DataFrame): GitHub objects to download. Must have columns for the
+            object "githubUrl" and "sha256". Use the load_github_metadata function to
+            get the metadata.
+        processes (Optional[int], optional): Number of processes to use for downloading.
+            If None, will use the number of CPUs on the machine. Defaults to None.
+        download_dir (str, optional): Directory to download the GitHub objects to.
+            Supports all file systems supported by fsspec. Defaults to "~/.objaverse".
+
+    Returns:
+        List[Dict[str, str]]: List of dictionaries with the keys "githubUrl" and
+            "sha256" for each downloaded object.
+    """
+    if processes is None:
+        processes = multiprocessing.cpu_count()
+
+    base_download_dir = os.path.join(download_dir, "github")
+    fs, path = fsspec.core.url_to_fs(base_download_dir)
+    fs.makedirs(path, exist_ok=True)
+
+    # Getting immediate subdirectories of root_path
+    if save_repo_format == "files":
+        downloaded_repo_dirs = fs.glob(base_download_dir + "/repos/*/*/")
+        downloaded_repo_ids = set(
+            ["/".join(x.split("/")[-2:]) for x in downloaded_repo_dirs]
+        )
+    else:
+        downloaded_repo_dirs = fs.glob(
+            base_download_dir + f"/repos/*/*.{save_repo_format}"
+        )
+        downloaded_repo_ids = set()
+        for x in downloaded_repo_dirs:
+            org, repo = x.split("/")[-2:]
+            repo = repo[: -len(f".{save_repo_format}")]
+            repo_id = f"{org}/{repo}"
+            downloaded_repo_ids.add(repo_id)
+
+    # make copy of objects
+    objects = objects.copy()
+
+    # get the unique repoIds
+    objects["repoIdHash"] = objects.apply(_get_repo_id_with_hash, axis=1)
+    repo_id_hashes = set(objects["repoIdHash"].unique().tolist())
+    repo_ids = set(
+        ["/".join(repo_id_hash.split("/")[:2]) for repo_id_hash in repo_id_hashes]
+    )
+    assert len(repo_id_hashes) == len(repo_ids), (
+        f"More than 1 commit hash per repoId!"
+        f" {len(repo_id_hashes)=}, {len(repo_ids)=}"
+    )
+
+    logger.info(
+        f"Provided {len(repo_ids)} repoIds with {len(objects)} objects to process."
+    )
+
+    # remove repoIds that have already been downloaded
+    repo_ids_to_download = repo_ids - downloaded_repo_ids
+    repo_id_hashes_to_download = [
+        repo_id_hash
+        for repo_id_hash in repo_id_hashes
+        if "/".join(repo_id_hash.split("/")[:2]) in repo_ids_to_download
+    ]
+
+    logger.info(
+        f"Found {len(repo_ids_to_download)} not yet downloaded. Downloading now..."
+    )
+
+    # get the objects to download
+    groups = list(objects.groupby("repoIdHash"))
+    with Pool(processes=processes) as pool:
+        out_list = list(
+            tqdm(
+                pool.imap_unordered(_process_group, groups),
+                total=len(groups),
+                desc="Grouping objects by repository",
+            )
+        )
+    objects_per_repo_id_hash = dict(out_list)
+
+    all_args = [
+        (
+            repo_id_hash,
+            fs,
+            path,
+            save_repo_format,
+            objects_per_repo_id_hash[repo_id_hash],
+            # ordered to match the unpacking in _parallel_process_repo
+            handle_found_object,
+            handle_modified_object,
+            handle_missing_object,
+            handle_new_object,
+        )
+        for repo_id_hash in repo_id_hashes_to_download
+    ]
+
+    with Pool(processes=processes) as pool:
+        # use tqdm to show progress
+        out = list(
+            tqdm(
+                pool.imap_unordered(_parallel_process_repo, all_args),
+                total=len(all_args),
+            )
+        )
+    out_list = [item for sublist in out for item in sublist]
+    return out_list
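A minimal usage sketch of the download API added here (parameter values are only examples; the lambda matches the handler signature documented at the top of the file):

    annotations = load_github_metadata(download_dir="~/.objaverse")
    downloaded = download_github_objects(
        objects=annotations.head(10),
        processes=4,
        download_dir="~/.objaverse",
        save_repo_format="zip",
        handle_found_object=lambda file, github_url, sha256, repo, organization: print(file),
    )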
+
+
+def test_process_repo():
+    download_dir = "~/.objaverse-tests"
+    base_download_dir = os.path.join(download_dir, "github")
+    fs, path = fsspec.core.url_to_fs(base_download_dir)
+    fs.makedirs(path, exist_ok=True)
+
+    for save_repo_format in ["tar", "tar.gz", "zip", "files"]:
+        # shutil.rmtree(os.path.join(path, "repos"), ignore_errors=True)
+        out = _process_repo(
+            repo_id="mattdeitke/objaverse-xl-test-files",
+            fs=fs,
+            base_dir=path,
+            save_repo_format=save_repo_format,  # type: ignore
+            expected_objects=dict(),
+            handle_found_object=None,
+            handle_modified_object=None,
+            handle_missing_object=None,
+            handle_new_object=None,
+            commit_hash="6928b08a2501aa7a4a4aabac1f888b66e7782056",
+        )
+
+        # test that the sha256's are correct
+        assert len(out) == 3
+        sha256s = [x["sha256"] for x in out]
+        for sha256 in [
+            "d2b9a5d7c47dc93526082c9b630157ab6bce4fd8669610d942176f4a36444e71",
+            "04e6377317d6818e32c5cbd1951e76deb3641bbf4f6db6933046221d5fbf1c5c",
+            "7037575f47816118e5a34e7c0da9927e1be7be3f5b4adfac337710822eb50fa9",
+        ]:
+            assert sha256 in sha256s, f"{sha256=} not in {sha256s=}"
+        github_urls = [x["githubUrl"] for x in out]
+        for github_url in [
+            "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example.fbx",
+            "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example.glb",
+            "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example.obj",
+        ]:
+            assert github_url in github_urls, f"{github_url=} not in {github_urls=}"
+
+        # test that the files are correct
+        if save_repo_format != "files":
+            assert fs.exists(
+                os.path.join(
+                    path,
+                    "repos",
+                    "mattdeitke",
+                    f"objaverse-xl-test-files.{save_repo_format}",
+                )
+            )
+        else:
+            assert fs.exists(
+                os.path.join(
+                    base_download_dir, "repos", "mattdeitke", "objaverse-xl-test-files"
+                )
+            )
+
+
+def test_handle_new_object():
+    found_objects = []
+    handle_found_object = (
+        lambda file, github_url, sha256, repo, organization: found_objects.append(
+            dict(
+                file=file,
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    missing_objects = []
+    handle_missing_object = (
+        lambda github_url, sha256, repo, organization: missing_objects.append(
+            dict(
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    new_objects = []
+    handle_new_object = (
+        lambda file, github_url, sha256, repo, organization: new_objects.append(
+            dict(
+                file=file,
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    modified_objects = []
+    handle_modified_object = (
+        lambda file, github_url, sha256, repo, organization: modified_objects.append(
+            dict(
+                file=file,
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    download_dir = "~/.objaverse-tests"
+    base_download_dir = os.path.join(download_dir, "github")
+    fs, path = fsspec.core.url_to_fs(base_download_dir)
+    fs.makedirs(path, exist_ok=True)
+
+    shutil.rmtree(os.path.join(path, "repos"), ignore_errors=True)
+    out = _process_repo(
+        repo_id="mattdeitke/objaverse-xl-test-files",
+        fs=fs,
+        base_dir=path,
+        save_repo_format=None,
+        expected_objects=dict(),
+        handle_found_object=handle_found_object,
+        handle_modified_object=handle_modified_object,
+        handle_missing_object=handle_missing_object,
+        handle_new_object=handle_new_object,
+        commit_hash="6928b08a2501aa7a4a4aabac1f888b66e7782056",
+    )
+
+    assert len(out) == 3
+    assert len(new_objects) == 3
+    assert len(found_objects) == 0
+    assert len(modified_objects) == 0
+    assert len(missing_objects) == 0
+
+
+def test_handle_found_object():
+    found_objects = []
+    handle_found_object = (
+        lambda file, github_url, sha256, repo, organization: found_objects.append(
+            dict(
+                file=file,
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    missing_objects = []
+    handle_missing_object = (
+        lambda github_url, sha256, repo, organization: missing_objects.append(
+            dict(
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    new_objects = []
+    handle_new_object = (
+        lambda file, github_url, sha256, repo, organization: new_objects.append(
+            dict(
+                file=file,
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    modified_objects = []
+    handle_modified_object = (
+        lambda file, github_url, sha256, repo, organization: modified_objects.append(
+            dict(
+                file=file,
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    download_dir = "~/.objaverse-tests"
+    base_download_dir = os.path.join(download_dir, "github")
+    fs, path = fsspec.core.url_to_fs(base_download_dir)
+    fs.makedirs(path, exist_ok=True)
+
+    shutil.rmtree(os.path.join(path, "repos"), ignore_errors=True)
+    out = _process_repo(
+        repo_id="mattdeitke/objaverse-xl-test-files",
+        fs=fs,
+        base_dir=path,
+        save_repo_format=None,
+        expected_objects={
+            "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example.fbx": "7037575f47816118e5a34e7c0da9927e1be7be3f5b4adfac337710822eb50fa9",
+            "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example.glb": "04e6377317d6818e32c5cbd1951e76deb3641bbf4f6db6933046221d5fbf1c5c",
+            "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example.obj": "d2b9a5d7c47dc93526082c9b630157ab6bce4fd8669610d942176f4a36444e71",
+        },
+        handle_found_object=handle_found_object,
+        handle_modified_object=handle_modified_object,
+        handle_missing_object=handle_missing_object,
+        handle_new_object=handle_new_object,
+        commit_hash="6928b08a2501aa7a4a4aabac1f888b66e7782056",
+    )
+
+    assert len(out) == 3
+    assert len(found_objects) == 3
+    assert len(missing_objects) == 0
+    assert len(new_objects) == 0
+    assert len(modified_objects) == 0
+
+
+def test_handle_modified_object():
+    found_objects = []
+    handle_found_object = (
+        lambda file, github_url, sha256, repo, organization: found_objects.append(
+            dict(
+                file=file,
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    missing_objects = []
+    handle_missing_object = (
+        lambda github_url, sha256, repo, organization: missing_objects.append(
+            dict(
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    new_objects = []
+    handle_new_object = (
+        lambda file, github_url, sha256, repo, organization: new_objects.append(
+            dict(
+                file=file,
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    modified_objects = []
+    handle_modified_object = (
+        lambda file, github_url, sha256, repo, organization: modified_objects.append(
+            dict(
+                file=file,
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    download_dir = "~/.objaverse-tests"
+    base_download_dir = os.path.join(download_dir, "github")
+    fs, path = fsspec.core.url_to_fs(base_download_dir)
+    fs.makedirs(path, exist_ok=True)
+
+    shutil.rmtree(os.path.join(path, "repos"), ignore_errors=True)
+    out = _process_repo(
+        repo_id="mattdeitke/objaverse-xl-test-files",
+        fs=fs,
+        base_dir=path,
+        save_repo_format=None,
+        expected_objects={
+            "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example.fbx": "7037575f47816118e5a34e7c0da9927e1be7be3f5b4adfac337710822eb50fa9<modified>",
+            "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example.glb": "04e6377317d6818e32c5cbd1951e76deb3641bbf4f6db6933046221d5fbf1c5c",
+            "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example.obj": "d2b9a5d7c47dc93526082c9b630157ab6bce4fd8669610d942176f4a36444e71",
+        },
+        handle_found_object=handle_found_object,
+        handle_modified_object=handle_modified_object,
+        handle_missing_object=handle_missing_object,
+        handle_new_object=handle_new_object,
+        commit_hash="6928b08a2501aa7a4a4aabac1f888b66e7782056",
+    )
+
+    assert len(out) == 3
+    assert len(found_objects) == 2
+    assert len(missing_objects) == 0
+    assert len(new_objects) == 0
+    assert len(modified_objects) == 1
+
+
+def test_handle_missing_object():
+    found_objects = []
+    handle_found_object = (
+        lambda file, github_url, sha256, repo, organization: found_objects.append(
+            dict(
+                file=file,
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    missing_objects = []
+    handle_missing_object = (
+        lambda github_url, sha256, repo, organization: missing_objects.append(
+            dict(
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    new_objects = []
+    handle_new_object = (
+        lambda file, github_url, sha256, repo, organization: new_objects.append(
+            dict(
+                file=file,
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    modified_objects = []
+    handle_modified_object = (
+        lambda file, github_url, sha256, repo, organization: modified_objects.append(
+            dict(
+                file=file,
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    download_dir = "~/.objaverse-tests"
+    base_download_dir = os.path.join(download_dir, "github")
+    fs, path = fsspec.core.url_to_fs(base_download_dir)
+    fs.makedirs(path, exist_ok=True)
+
+    shutil.rmtree(os.path.join(path, "repos"), ignore_errors=True)
+    out = _process_repo(
+        repo_id="mattdeitke/objaverse-xl-test-files",
+        fs=fs,
+        base_dir=path,
+        save_repo_format=None,
+        expected_objects={
+            "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example.fbx": "7037575f47816118e5a34e7c0da9927e1be7be3f5b4adfac337710822eb50fa9",
+            "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example-2.fbx": "<fake-missing-object>",
+            "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example.glb": "04e6377317d6818e32c5cbd1951e76deb3641bbf4f6db6933046221d5fbf1c5c",
+            "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example.obj": "d2b9a5d7c47dc93526082c9b630157ab6bce4fd8669610d942176f4a36444e71",
+        },
+        handle_found_object=handle_found_object,
+        handle_modified_object=handle_modified_object,
+        handle_missing_object=handle_missing_object,
+        handle_new_object=handle_new_object,
+        commit_hash="6928b08a2501aa7a4a4aabac1f888b66e7782056",
+    )
+
+    assert len(out) == 3
+    assert len(found_objects) == 3
+    assert len(missing_objects) == 1
+    assert len(new_objects) == 0
+    assert len(modified_objects) == 0
+
+
+def test_handle_missing_object_2():
+    found_objects = []
+    handle_found_object = (
+        lambda file, github_url, sha256, repo, organization: found_objects.append(
+            dict(
+                file=file,
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    missing_objects = []
+    handle_missing_object = (
+        lambda github_url, sha256, repo, organization: missing_objects.append(
+            dict(
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    new_objects = []
+    handle_new_object = (
+        lambda file, github_url, sha256, repo, organization: new_objects.append(
+            dict(
+                file=file,
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    modified_objects = []
+    handle_modified_object = (
+        lambda file, github_url, sha256, repo, organization: modified_objects.append(
+            dict(
+                file=file,
+                github_url=github_url,
+                sha256=sha256,
+                repo=repo,
+                organization=organization,
+            )
+        )
+    )
+
+    download_dir = "~/.objaverse-tests"
+    base_download_dir = os.path.join(download_dir, "github")
+    fs, path = fsspec.core.url_to_fs(base_download_dir)
+    fs.makedirs(path, exist_ok=True)
+
+    shutil.rmtree(os.path.join(path, "repos"), ignore_errors=True)
+    out = _process_repo(
+        repo_id="mattdeitke/objaverse-xl-test-files-does-not-exist",
+        fs=fs,
+        base_dir=path,
+        save_repo_format=None,
+        expected_objects={
+            "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example.fbx": "7037575f47816118e5a34e7c0da9927e1be7be3f5b4adfac337710822eb50fa9",
+            "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example-2.fbx": "<fake-missing-object>",
+            "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example.glb": "04e6377317d6818e32c5cbd1951e76deb3641bbf4f6db6933046221d5fbf1c5c",
+            "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example.obj": "d2b9a5d7c47dc93526082c9b630157ab6bce4fd8669610d942176f4a36444e71",
+        },
+        handle_found_object=handle_found_object,
+        handle_modified_object=handle_modified_object,
+        handle_missing_object=handle_missing_object,
+        handle_new_object=handle_new_object,
+        commit_hash="6928b08a2501aa7a4a4aabac1f888b66e7782056",
+    )
+
+    assert len(out) == 0
+    assert len(found_objects) == 0
+    assert len(missing_objects) == 4
+    assert len(new_objects) == 0
+    assert len(modified_objects) == 0
+
+
+def test_download_cache():
+    objects = pd.DataFrame(
+        [
+            {
+                "githubUrl": "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example.fbx",
+                "license": None,
+                "sha256": "7037575f47816118e5a34e7c0da9927e1be7be3f5b4adfac337710822eb50fa9",
+            },
+            {
+                "githubUrl": "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example.glb",
+                "license": None,
+                "sha256": "04e6377317d6818e32c5cbd1951e76deb3641bbf4f6db6933046221d5fbf1c5c",
+            },
+            {
+                "githubUrl": "https://github.com/mattdeitke/objaverse-xl-test-files/blob/6928b08a2501aa7a4a4aabac1f888b66e7782056/example.obj",
+                "license": None,
+                "sha256": "d2b9a5d7c47dc93526082c9b630157ab6bce4fd8669610d942176f4a36444e71",
+            },
+        ]
+    )
+
+    # remove the repos directory
+    for save_repo_format in ["tar", "tar.gz", "zip", "files"]:
+        repos_dir = "~/.objaverse-tests/github/repos"
+        shutil.rmtree(os.path.expanduser(repos_dir), ignore_errors=True)
+
+        out = download_github_objects(
+            objects=objects,
+            processes=1,
+            download_dir="~/.objaverse-tests",
+            save_repo_format=save_repo_format,  # type: ignore
+        )
+        assert len(out) == 3
+
+        out = download_github_objects(
+            objects=objects,
+            processes=1,
+            download_dir="~/.objaverse-tests",
+            save_repo_format=save_repo_format,  # type: ignore
+        )
+        assert len(out) == 0