# Import the json and os modules
import json
import os

# Define the directory where the json files are located
directory = "./test"
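# Note: this is a relative path, so it is resolved against the current
# working directory at the time the script is run.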

# Define a function to deduplicate and clean a json file
def dedup_and_clean(file):
    # Open the file in read mode
    with open(file, "r", encoding="utf-8") as infile:
        # Load the file as a json object
        data = json.load(infile)
    # Create an empty list to store the unique and non-empty items
    new_data = []
    # Create an empty set to store the seen items
    seen = set()
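    # A set is used because membership checks are O(1) on average, so the
    # deduplication pass stays roughly linear in the number of items.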
    # Loop through each item in the data
    for item in data:
        # Convert the item to a string for hashing
        item_str = json.dumps(item, sort_keys=True)
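        # Note: sort_keys=True serializes dicts with the same keys and values
        # to the same string regardless of key order, so {"a": 1, "b": 2} and
        # {"b": 2, "a": 1} count as duplicates.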
        # Skip empty objects and items that have already been seen
        if item_str != "{}" and item_str not in seen:
            # Add the item to the new data list
            new_data.append(item)
            # Add the item string to the seen set
            seen.add(item_str)
    # Open the file in write mode
    with open(file, "w", encoding="utf-8") as outfile:
        # Dump the new data list as json to the file
        json.dump(new_data, outfile, indent=4)
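
# A minimal illustration with hypothetical data (not taken from any real file):
# a file containing [{"a": 1}, {"b": 2}, {"a": 1}, {}] would be rewritten as
# [{"a": 1}, {"b": 2}], since the repeated object and the empty object are dropped.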

# Loop through each file in the directory
for file in os.listdir(directory):
    # Check if the file is a json file
    if file.endswith(".json"):
        # Deduplicate and clean the json file, printing progress messages
        print(f"Deduplicating and cleaning {file}...")
        dedup_and_clean(os.path.join(directory, file))
        print(f"{file} is done.")
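
# Example console output, assuming the directory contains a single hypothetical
# file named data.json:
#   Deduplicating and cleaning data.json...
#   data.json is done.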