{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# This is a test-script that loads the dataset." ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "# %pip install datasets\n", "from datasets import load_dataset\n", "import pandas as pd\n" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "b56274faa04d46c0a8ce3871242ffc6e", "version_major": 2, "version_minor": 0 }, "text/plain": [ "Downloading readme: 0%| | 0.00/1.37k [00:00, ?B/s]" ] }, "metadata": {}, "output_type": "display_data" }, { "ename": "FileNotFoundError", "evalue": "Couldn't find a dataset script at /Users/amittaijoel/workspace/crawl.hs/data/metadata/siavava/ai-tech-articles/ai-tech-articles.py or any data file in the same directory. Couldn't find 'siavava/ai-tech-articles' on the Hugging Face Hub either: FileNotFoundError: No (supported) data files or dataset script found in siavava/ai-tech-articles. ", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mFileNotFoundError\u001b[0m Traceback (most recent call last)", "\u001b[1;32m/Users/amittaijoel/workspace/crawl.hs/data/metadata/test.ipynb Cell 3\u001b[0m line \u001b[0;36m1\n\u001b[0;32m----> 1\u001b[0m dt \u001b[39m=\u001b[39m load_dataset(\u001b[39m\"\u001b[39;49m\u001b[39msiavava/ai-tech-articles\u001b[39;49m\u001b[39m\"\u001b[39;49m)\n", "File \u001b[0;32m~/miniconda3/envs/data-mining/lib/python3.11/site-packages/datasets/load.py:2129\u001b[0m, in \u001b[0;36mload_dataset\u001b[0;34m(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, verification_mode, ignore_verifications, keep_in_memory, save_infos, revision, token, use_auth_token, task, streaming, num_proc, storage_options, **config_kwargs)\u001b[0m\n\u001b[1;32m 2124\u001b[0m verification_mode \u001b[39m=\u001b[39m VerificationMode(\n\u001b[1;32m 2125\u001b[0m (verification_mode \u001b[39mor\u001b[39;00m VerificationMode\u001b[39m.\u001b[39mBASIC_CHECKS) \u001b[39mif\u001b[39;00m \u001b[39mnot\u001b[39;00m save_infos \u001b[39melse\u001b[39;00m VerificationMode\u001b[39m.\u001b[39mALL_CHECKS\n\u001b[1;32m 2126\u001b[0m )\n\u001b[1;32m 2128\u001b[0m \u001b[39m# Create a dataset builder\u001b[39;00m\n\u001b[0;32m-> 2129\u001b[0m builder_instance \u001b[39m=\u001b[39m load_dataset_builder(\n\u001b[1;32m 2130\u001b[0m path\u001b[39m=\u001b[39;49mpath,\n\u001b[1;32m 2131\u001b[0m name\u001b[39m=\u001b[39;49mname,\n\u001b[1;32m 2132\u001b[0m data_dir\u001b[39m=\u001b[39;49mdata_dir,\n\u001b[1;32m 2133\u001b[0m data_files\u001b[39m=\u001b[39;49mdata_files,\n\u001b[1;32m 2134\u001b[0m cache_dir\u001b[39m=\u001b[39;49mcache_dir,\n\u001b[1;32m 2135\u001b[0m features\u001b[39m=\u001b[39;49mfeatures,\n\u001b[1;32m 2136\u001b[0m download_config\u001b[39m=\u001b[39;49mdownload_config,\n\u001b[1;32m 2137\u001b[0m download_mode\u001b[39m=\u001b[39;49mdownload_mode,\n\u001b[1;32m 2138\u001b[0m revision\u001b[39m=\u001b[39;49mrevision,\n\u001b[1;32m 2139\u001b[0m token\u001b[39m=\u001b[39;49mtoken,\n\u001b[1;32m 2140\u001b[0m storage_options\u001b[39m=\u001b[39;49mstorage_options,\n\u001b[1;32m 2141\u001b[0m \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mconfig_kwargs,\n\u001b[1;32m 2142\u001b[0m )\n\u001b[1;32m 2144\u001b[0m \u001b[39m# Return iterable dataset in case of streaming\u001b[39;00m\n\u001b[1;32m 2145\u001b[0m 
\u001b[39mif\u001b[39;00m streaming:\n", "File \u001b[0;32m~/miniconda3/envs/data-mining/lib/python3.11/site-packages/datasets/load.py:1815\u001b[0m, in \u001b[0;36mload_dataset_builder\u001b[0;34m(path, name, data_dir, data_files, cache_dir, features, download_config, download_mode, revision, token, use_auth_token, storage_options, **config_kwargs)\u001b[0m\n\u001b[1;32m 1813\u001b[0m download_config \u001b[39m=\u001b[39m download_config\u001b[39m.\u001b[39mcopy() \u001b[39mif\u001b[39;00m download_config \u001b[39melse\u001b[39;00m DownloadConfig()\n\u001b[1;32m 1814\u001b[0m download_config\u001b[39m.\u001b[39mstorage_options\u001b[39m.\u001b[39mupdate(storage_options)\n\u001b[0;32m-> 1815\u001b[0m dataset_module \u001b[39m=\u001b[39m dataset_module_factory(\n\u001b[1;32m 1816\u001b[0m path,\n\u001b[1;32m 1817\u001b[0m revision\u001b[39m=\u001b[39;49mrevision,\n\u001b[1;32m 1818\u001b[0m download_config\u001b[39m=\u001b[39;49mdownload_config,\n\u001b[1;32m 1819\u001b[0m download_mode\u001b[39m=\u001b[39;49mdownload_mode,\n\u001b[1;32m 1820\u001b[0m data_dir\u001b[39m=\u001b[39;49mdata_dir,\n\u001b[1;32m 1821\u001b[0m data_files\u001b[39m=\u001b[39;49mdata_files,\n\u001b[1;32m 1822\u001b[0m )\n\u001b[1;32m 1823\u001b[0m \u001b[39m# Get dataset builder class from the processing script\u001b[39;00m\n\u001b[1;32m 1824\u001b[0m builder_kwargs \u001b[39m=\u001b[39m dataset_module\u001b[39m.\u001b[39mbuilder_kwargs\n", "File \u001b[0;32m~/miniconda3/envs/data-mining/lib/python3.11/site-packages/datasets/load.py:1508\u001b[0m, in \u001b[0;36mdataset_module_factory\u001b[0;34m(path, revision, download_config, download_mode, dynamic_modules_path, data_dir, data_files, **download_kwargs)\u001b[0m\n\u001b[1;32m 1506\u001b[0m \u001b[39mraise\u001b[39;00m e1 \u001b[39mfrom\u001b[39;00m \u001b[39mNone\u001b[39;00m\n\u001b[1;32m 1507\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39misinstance\u001b[39m(e1, \u001b[39mFileNotFoundError\u001b[39;00m):\n\u001b[0;32m-> 1508\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mFileNotFoundError\u001b[39;00m(\n\u001b[1;32m 1509\u001b[0m \u001b[39mf\u001b[39m\u001b[39m\"\u001b[39m\u001b[39mCouldn\u001b[39m\u001b[39m'\u001b[39m\u001b[39mt find a dataset script at \u001b[39m\u001b[39m{\u001b[39;00mrelative_to_absolute_path(combined_path)\u001b[39m}\u001b[39;00m\u001b[39m or any data file in the same directory. \u001b[39m\u001b[39m\"\u001b[39m\n\u001b[1;32m 1510\u001b[0m \u001b[39mf\u001b[39m\u001b[39m\"\u001b[39m\u001b[39mCouldn\u001b[39m\u001b[39m'\u001b[39m\u001b[39mt find \u001b[39m\u001b[39m'\u001b[39m\u001b[39m{\u001b[39;00mpath\u001b[39m}\u001b[39;00m\u001b[39m'\u001b[39m\u001b[39m on the Hugging Face Hub either: \u001b[39m\u001b[39m{\u001b[39;00m\u001b[39mtype\u001b[39m(e1)\u001b[39m.\u001b[39m\u001b[39m__name__\u001b[39m\u001b[39m}\u001b[39;00m\u001b[39m: \u001b[39m\u001b[39m{\u001b[39;00me1\u001b[39m}\u001b[39;00m\u001b[39m\"\u001b[39m\n\u001b[1;32m 1511\u001b[0m ) \u001b[39mfrom\u001b[39;00m \u001b[39mNone\u001b[39;00m\n\u001b[1;32m 1512\u001b[0m \u001b[39mraise\u001b[39;00m e1 \u001b[39mfrom\u001b[39;00m \u001b[39mNone\u001b[39;00m\n\u001b[1;32m 1513\u001b[0m \u001b[39melse\u001b[39;00m:\n", "\u001b[0;31mFileNotFoundError\u001b[0m: Couldn't find a dataset script at /Users/amittaijoel/workspace/crawl.hs/data/metadata/siavava/ai-tech-articles/ai-tech-articles.py or any data file in the same directory. 
Couldn't find 'siavava/ai-tech-articles' on the Hugging Face Hub either: FileNotFoundError: No (supported) data files or dataset script found in siavava/ai-tech-articles. " ] } ], "source": [ "dt = load_dataset(\"siavava/ai-tech-articles\")\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
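  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The `FileNotFoundError` above means `datasets` found neither a loading script nor any supported data files for `siavava/ai-tech-articles`, locally or on the Hub. Two common fallbacks are sketched below, kept commented out; the private-repo guess (`token=True`) and the local Parquet path are assumptions, not taken from this notebook."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Fallback sketches -- both lines are assumptions, not confirmed fixes.\n",
    "# 1) If the Hub repo exists but is private, authenticate explicitly\n",
    "#    (requires a prior `huggingface-cli login`):\n",
    "# dt = load_dataset(\"siavava/ai-tech-articles\", token=True)\n",
    "\n",
    "# 2) If the data only exists locally, point an explicit builder at the\n",
    "#    files (hypothetical path):\n",
    "# dt = load_dataset(\"parquet\", data_files=\"data/ai-tech-articles/*.parquet\")\n"
   ]
  },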
\n", " | id | \n", "year | \n", "title | \n", "url | \n", "text | \n", "__index_level_0__ | \n", "
---|---|---|---|---|---|---|
0 | \n", "0 | \n", "2023.0 | \n", "\"MIT Technology Review\" | \n", "\"https://www.technologyreview.com\" | \n", "\"Featured Topics Newsletters Events Podcasts F... | \n", "0 | \n", "
1 | \n", "1 | \n", "2023.0 | \n", "\"WIRED - The Latest in Technology, Science, Cu... | \n", "\"https://www.wired.com\" | \n", "\"Open Navigation Menu To revisit this article,... | \n", "1 | \n", "
2 | \n", "2 | \n", "2019.0 | \n", "\"The Verge\" | \n", "\"https://www.theverge.com\" | \n", "\"The Verge homepage The Verge The Verge logo.\\... | \n", "2 | \n", "
3 | \n", "3 | \n", "2023.0 | \n", "\"TechCrunch | Startup and Technology News\" | \n", "\"https://www.techcrunch.com\" | \n", "\"WeWork reportedly on the verge of filing for ... | \n", "3 | \n", "
4 | \n", "4 | \n", "2022.0 | \n", "\"A new vision of artificial intelligence for t... | \n", "\"https://www.technologyreview.com/2022/04/22/1... | \n", "\"Featured Topics Newsletters Events Podcasts A... | \n", "4 | \n", "
5 | \n", "5 | \n", "2022.0 | \n", "\"The scientist who co-created CRISPR isn’t rul... | \n", "\"https://www.technologyreview.com/2022/04/26/1... | \n", "\"Featured Topics Newsletters Events Podcasts F... | \n", "5 | \n", "
6 | \n", "6 | \n", "2022.0 | \n", "\"These fast, cheap tests could help us coexist... | \n", "\"https://www.technologyreview.com/2022/04/27/1... | \n", "\"Featured Topics Newsletters Events Podcasts F... | \n", "6 | \n", "
7 | \n", "7 | \n", "2022.0 | \n", "\"Tackling multiple tasks with a single visual ... | \n", "\"http://www.deepmind.com/blog/tackling-multipl... | \n", "\"DeepMind Search Search Close DeepMind About O... | \n", "7 | \n", "
8 | \n", "8 | \n", "2019.0 | \n", "\"About - Google DeepMind\" | \n", "\"https://www.deepmind.com/about\" | \n", "\"DeepMind Search Search Close DeepMind About O... | \n", "8 | \n", "
9 | \n", "9 | \n", "2023.0 | \n", "\"Blog - Google DeepMind\" | \n", "\"https://www.deepmind.com/blog-categories/appl... | \n", "\"DeepMind Search Search Close DeepMind About O... | \n", "9 | \n", "